./PaxHeaders.20921/globus_gram_job_manager_scripts-7.30000644000000000000000000000013213765230140021032 xustar000000000000000030 mtime=1607807072.928897731 30 atime=1607807072.932897731 30 ctime=1607807072.928897731 globus_gram_job_manager_scripts-7.3/0000755000372000037200000000000013765230140020660 5ustar00travistravis00000000000000globus_gram_job_manager_scripts-7.3/PaxHeaders.20921/build-aux0000644000000000000000000000013213765230140022567 xustar000000000000000030 mtime=1607807072.920897731 30 atime=1607807072.932897731 30 ctime=1607807072.920897731 globus_gram_job_manager_scripts-7.3/build-aux/0000755000372000037200000000000013765230140022552 5ustar00travistravis00000000000000globus_gram_job_manager_scripts-7.3/build-aux/PaxHeaders.20921/install-sh0000644000000000000000000000013213765227401024653 xustar000000000000000030 mtime=1607806721.232897731 30 atime=1607806721.232897731 30 ctime=1607807072.920897731 globus_gram_job_manager_scripts-7.3/build-aux/install-sh0000755000372000037200000003325513765227401024574 0ustar00travistravis00000000000000#!/bin/sh # install - install a program, script, or datafile scriptversion=2011-11-20.07; # UTC # This originates from X11R5 (mit/util/scripts/install.sh), which was # later released in X11R6 (xc/config/util/install.sh) with the # following copyright and license. # # Copyright (C) 1994 X Consortium # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to # deal in the Software without restriction, including without limitation the # rights to use, copy, modify, merge, publish, distribute, sublicense, and/or # sell copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # X CONSORTIUM BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN # AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNEC- # TION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. # # Except as contained in this notice, the name of the X Consortium shall not # be used in advertising or otherwise to promote the sale, use or other deal- # ings in this Software without prior written authorization from the X Consor- # tium. # # # FSF changes to this file are in the public domain. # # Calling this script install-sh is preferred over install.sh, to prevent # 'make' implicit rules from creating a file called install from it # when there is no Makefile. # # This script is compatible with the BSD install script, but was written # from scratch. nl=' ' IFS=" "" $nl" # set DOITPROG to echo to test this script # Don't use :- since 4.3BSD and earlier shells don't like it. doit=${DOITPROG-} if test -z "$doit"; then doit_exec=exec else doit_exec=$doit fi # Put in absolute file names if you don't have them in your path; # or use environment vars. chgrpprog=${CHGRPPROG-chgrp} chmodprog=${CHMODPROG-chmod} chownprog=${CHOWNPROG-chown} cmpprog=${CMPPROG-cmp} cpprog=${CPPROG-cp} mkdirprog=${MKDIRPROG-mkdir} mvprog=${MVPROG-mv} rmprog=${RMPROG-rm} stripprog=${STRIPPROG-strip} posix_glob='?' 
initialize_posix_glob=' test "$posix_glob" != "?" || { if (set -f) 2>/dev/null; then posix_glob= else posix_glob=: fi } ' posix_mkdir= # Desired mode of installed file. mode=0755 chgrpcmd= chmodcmd=$chmodprog chowncmd= mvcmd=$mvprog rmcmd="$rmprog -f" stripcmd= src= dst= dir_arg= dst_arg= copy_on_change=false no_target_directory= usage="\ Usage: $0 [OPTION]... [-T] SRCFILE DSTFILE or: $0 [OPTION]... SRCFILES... DIRECTORY or: $0 [OPTION]... -t DIRECTORY SRCFILES... or: $0 [OPTION]... -d DIRECTORIES... In the 1st form, copy SRCFILE to DSTFILE. In the 2nd and 3rd, copy all SRCFILES to DIRECTORY. In the 4th, create DIRECTORIES. Options: --help display this help and exit. --version display version info and exit. -c (ignored) -C install only if different (preserve the last data modification time) -d create directories instead of installing files. -g GROUP $chgrpprog installed files to GROUP. -m MODE $chmodprog installed files to MODE. -o USER $chownprog installed files to USER. -s $stripprog installed files. -t DIRECTORY install into DIRECTORY. -T report an error if DSTFILE is a directory. Environment variables override the default commands: CHGRPPROG CHMODPROG CHOWNPROG CMPPROG CPPROG MKDIRPROG MVPROG RMPROG STRIPPROG " while test $# -ne 0; do case $1 in -c) ;; -C) copy_on_change=true;; -d) dir_arg=true;; -g) chgrpcmd="$chgrpprog $2" shift;; --help) echo "$usage"; exit $?;; -m) mode=$2 case $mode in *' '* | *' '* | *' '* | *'*'* | *'?'* | *'['*) echo "$0: invalid mode: $mode" >&2 exit 1;; esac shift;; -o) chowncmd="$chownprog $2" shift;; -s) stripcmd=$stripprog;; -t) dst_arg=$2 # Protect names problematic for 'test' and other utilities. case $dst_arg in -* | [=\(\)!]) dst_arg=./$dst_arg;; esac shift;; -T) no_target_directory=true;; --version) echo "$0 $scriptversion"; exit $?;; --) shift break;; -*) echo "$0: invalid option: $1" >&2 exit 1;; *) break;; esac shift done if test $# -ne 0 && test -z "$dir_arg$dst_arg"; then # When -d is used, all remaining arguments are directories to create. # When -t is used, the destination is already specified. # Otherwise, the last argument is the destination. Remove it from $@. for arg do if test -n "$dst_arg"; then # $@ is not empty: it contains at least $arg. set fnord "$@" "$dst_arg" shift # fnord fi shift # arg dst_arg=$arg # Protect names problematic for 'test' and other utilities. case $dst_arg in -* | [=\(\)!]) dst_arg=./$dst_arg;; esac done fi if test $# -eq 0; then if test -z "$dir_arg"; then echo "$0: no input file specified." >&2 exit 1 fi # It's OK to call 'install-sh -d' without argument. # This can happen when creating conditional directories. exit 0 fi if test -z "$dir_arg"; then do_exit='(exit $ret); exit $ret' trap "ret=129; $do_exit" 1 trap "ret=130; $do_exit" 2 trap "ret=141; $do_exit" 13 trap "ret=143; $do_exit" 15 # Set umask so as not to create temps with too-generous modes. # However, 'strip' requires both read and write access to temps. case $mode in # Optimize common cases. *644) cp_umask=133;; *755) cp_umask=22;; *[0-7]) if test -z "$stripcmd"; then u_plus_rw= else u_plus_rw='% 200' fi cp_umask=`expr '(' 777 - $mode % 1000 ')' $u_plus_rw`;; *) if test -z "$stripcmd"; then u_plus_rw= else u_plus_rw=,u+rw fi cp_umask=$mode$u_plus_rw;; esac fi for src do # Protect names problematic for 'test' and other utilities. case $src in -* | [=\(\)!]) src=./$src;; esac if test -n "$dir_arg"; then dst=$src dstdir=$dst test -d "$dstdir" dstdir_status=$? 
else # Waiting for this to be detected by the "$cpprog $src $dsttmp" command # might cause directories to be created, which would be especially bad # if $src (and thus $dsttmp) contains '*'. if test ! -f "$src" && test ! -d "$src"; then echo "$0: $src does not exist." >&2 exit 1 fi if test -z "$dst_arg"; then echo "$0: no destination specified." >&2 exit 1 fi dst=$dst_arg # If destination is a directory, append the input filename; won't work # if double slashes aren't ignored. if test -d "$dst"; then if test -n "$no_target_directory"; then echo "$0: $dst_arg: Is a directory" >&2 exit 1 fi dstdir=$dst dst=$dstdir/`basename "$src"` dstdir_status=0 else # Prefer dirname, but fall back on a substitute if dirname fails. dstdir=` (dirname "$dst") 2>/dev/null || expr X"$dst" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ X"$dst" : 'X\(//\)[^/]' \| \ X"$dst" : 'X\(//\)$' \| \ X"$dst" : 'X\(/\)' \| . 2>/dev/null || echo X"$dst" | sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ s//\1/ q } /^X\(\/\/\)[^/].*/{ s//\1/ q } /^X\(\/\/\)$/{ s//\1/ q } /^X\(\/\).*/{ s//\1/ q } s/.*/./; q' ` test -d "$dstdir" dstdir_status=$? fi fi obsolete_mkdir_used=false if test $dstdir_status != 0; then case $posix_mkdir in '') # Create intermediate dirs using mode 755 as modified by the umask. # This is like FreeBSD 'install' as of 1997-10-28. umask=`umask` case $stripcmd.$umask in # Optimize common cases. *[2367][2367]) mkdir_umask=$umask;; .*0[02][02] | .[02][02] | .[02]) mkdir_umask=22;; *[0-7]) mkdir_umask=`expr $umask + 22 \ - $umask % 100 % 40 + $umask % 20 \ - $umask % 10 % 4 + $umask % 2 `;; *) mkdir_umask=$umask,go-w;; esac # With -d, create the new directory with the user-specified mode. # Otherwise, rely on $mkdir_umask. if test -n "$dir_arg"; then mkdir_mode=-m$mode else mkdir_mode= fi posix_mkdir=false case $umask in *[123567][0-7][0-7]) # POSIX mkdir -p sets u+wx bits regardless of umask, which # is incompatible with FreeBSD 'install' when (umask & 300) != 0. ;; *) tmpdir=${TMPDIR-/tmp}/ins$RANDOM-$$ trap 'ret=$?; rmdir "$tmpdir/d" "$tmpdir" 2>/dev/null; exit $ret' 0 if (umask $mkdir_umask && exec $mkdirprog $mkdir_mode -p -- "$tmpdir/d") >/dev/null 2>&1 then if test -z "$dir_arg" || { # Check for POSIX incompatibilities with -m. # HP-UX 11.23 and IRIX 6.5 mkdir -m -p sets group- or # other-writable bit of parent directory when it shouldn't. # FreeBSD 6.1 mkdir -m -p sets mode of existing directory. ls_ld_tmpdir=`ls -ld "$tmpdir"` case $ls_ld_tmpdir in d????-?r-*) different_mode=700;; d????-?--*) different_mode=755;; *) false;; esac && $mkdirprog -m$different_mode -p -- "$tmpdir" && { ls_ld_tmpdir_1=`ls -ld "$tmpdir"` test "$ls_ld_tmpdir" = "$ls_ld_tmpdir_1" } } then posix_mkdir=: fi rmdir "$tmpdir/d" "$tmpdir" else # Remove any dirs left behind by ancient mkdir implementations. rmdir ./$mkdir_mode ./-p ./-- 2>/dev/null fi trap '' 0;; esac;; esac if $posix_mkdir && ( umask $mkdir_umask && $doit_exec $mkdirprog $mkdir_mode -p -- "$dstdir" ) then : else # The umask is ridiculous, or mkdir does not conform to POSIX, # or it failed possibly due to a race condition. Create the # directory the slow way, step by step, checking for races as we go. 
case $dstdir in /*) prefix='/';; [-=\(\)!]*) prefix='./';; *) prefix='';; esac eval "$initialize_posix_glob" oIFS=$IFS IFS=/ $posix_glob set -f set fnord $dstdir shift $posix_glob set +f IFS=$oIFS prefixes= for d do test X"$d" = X && continue prefix=$prefix$d if test -d "$prefix"; then prefixes= else if $posix_mkdir; then (umask=$mkdir_umask && $doit_exec $mkdirprog $mkdir_mode -p -- "$dstdir") && break # Don't fail if two instances are running concurrently. test -d "$prefix" || exit 1 else case $prefix in *\'*) qprefix=`echo "$prefix" | sed "s/'/'\\\\\\\\''/g"`;; *) qprefix=$prefix;; esac prefixes="$prefixes '$qprefix'" fi fi prefix=$prefix/ done if test -n "$prefixes"; then # Don't fail if two instances are running concurrently. (umask $mkdir_umask && eval "\$doit_exec \$mkdirprog $prefixes") || test -d "$dstdir" || exit 1 obsolete_mkdir_used=true fi fi fi if test -n "$dir_arg"; then { test -z "$chowncmd" || $doit $chowncmd "$dst"; } && { test -z "$chgrpcmd" || $doit $chgrpcmd "$dst"; } && { test "$obsolete_mkdir_used$chowncmd$chgrpcmd" = false || test -z "$chmodcmd" || $doit $chmodcmd $mode "$dst"; } || exit 1 else # Make a couple of temp file names in the proper directory. dsttmp=$dstdir/_inst.$$_ rmtmp=$dstdir/_rm.$$_ # Trap to clean up those temp files at exit. trap 'ret=$?; rm -f "$dsttmp" "$rmtmp" && exit $ret' 0 # Copy the file name to the temp name. (umask $cp_umask && $doit_exec $cpprog "$src" "$dsttmp") && # and set any options; do chmod last to preserve setuid bits. # # If any of these fail, we abort the whole thing. If we want to # ignore errors from any of these, just make sure not to ignore # errors from the above "$doit $cpprog $src $dsttmp" command. # { test -z "$chowncmd" || $doit $chowncmd "$dsttmp"; } && { test -z "$chgrpcmd" || $doit $chgrpcmd "$dsttmp"; } && { test -z "$stripcmd" || $doit $stripcmd "$dsttmp"; } && { test -z "$chmodcmd" || $doit $chmodcmd $mode "$dsttmp"; } && # If -C, don't bother to copy if it wouldn't change the file. if $copy_on_change && old=`LC_ALL=C ls -dlL "$dst" 2>/dev/null` && new=`LC_ALL=C ls -dlL "$dsttmp" 2>/dev/null` && eval "$initialize_posix_glob" && $posix_glob set -f && set X $old && old=:$2:$4:$5:$6 && set X $new && new=:$2:$4:$5:$6 && $posix_glob set +f && test "$old" = "$new" && $cmpprog "$dst" "$dsttmp" >/dev/null 2>&1 then rm -f "$dsttmp" else # Rename the file to the real destination. $doit $mvcmd -f "$dsttmp" "$dst" 2>/dev/null || # The rename failed, perhaps because mv can't rename something else # to itself, or perhaps because mv is so ancient that it does not # support -f. { # Now remove or move aside any old file at destination location. # We try this two ways since rm can't unlink itself on some # systems and the destination file might be busy for other # reasons. In this case, the final cleanup might fail but the new # file should still install successfully. { test ! -f "$dst" || $doit $rmcmd -f "$dst" 2>/dev/null || { $doit $mvcmd -f "$dst" "$rmtmp" 2>/dev/null && { $doit $rmcmd -f "$rmtmp" 2>/dev/null; :; } } || { echo "$0: cannot unlink or rename $dst" >&2 (exit 1); exit 1 } } && # Now rename the file to the real destination. 
$doit $mvcmd "$dsttmp" "$dst" } fi || exit 1 trap '' 0 fi done # Local variables: # eval: (add-hook 'write-file-hooks 'time-stamp) # time-stamp-start: "scriptversion=" # time-stamp-format: "%:y-%02m-%02d.%02H" # time-stamp-time-zone: "UTC" # time-stamp-end: "; # UTC" # End: globus_gram_job_manager_scripts-7.3/build-aux/PaxHeaders.20921/missing0000644000000000000000000000013213765227401024246 xustar000000000000000030 mtime=1607806721.236897731 30 atime=1607806956.636897731 30 ctime=1607807072.920897731 globus_gram_job_manager_scripts-7.3/build-aux/missing0000755000372000037200000001533113765227401024162 0ustar00travistravis00000000000000#! /bin/sh # Common wrapper for a few potentially missing GNU programs. scriptversion=2012-06-26.16; # UTC # Copyright (C) 1996-2013 Free Software Foundation, Inc. # Originally written by Fran,cois Pinard , 1996. # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2, or (at your option) # any later version. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # You should have received a copy of the GNU General Public License # along with this program. If not, see . # As a special exception to the GNU General Public License, if you # distribute this file as part of a program that contains a # configuration script generated by Autoconf, you may include it under # the same distribution terms that you use for the rest of that program. if test $# -eq 0; then echo 1>&2 "Try '$0 --help' for more information" exit 1 fi case $1 in --is-lightweight) # Used by our autoconf macros to check whether the available missing # script is modern enough. exit 0 ;; --run) # Back-compat with the calling convention used by older automake. shift ;; -h|--h|--he|--hel|--help) echo "\ $0 [OPTION]... PROGRAM [ARGUMENT]... Run 'PROGRAM [ARGUMENT]...', returning a proper advice when this fails due to PROGRAM being missing or too old. Options: -h, --help display this help and exit -v, --version output version information and exit Supported PROGRAM values: aclocal autoconf autoheader autom4te automake makeinfo bison yacc flex lex help2man Version suffixes to PROGRAM as well as the prefixes 'gnu-', 'gnu', and 'g' are ignored when checking the name. Send bug reports to ." exit $? ;; -v|--v|--ve|--ver|--vers|--versi|--versio|--version) echo "missing $scriptversion (GNU Automake)" exit $? ;; -*) echo 1>&2 "$0: unknown '$1' option" echo 1>&2 "Try '$0 --help' for more information" exit 1 ;; esac # Run the given program, remember its exit status. "$@"; st=$? # If it succeeded, we are done. test $st -eq 0 && exit 0 # Also exit now if we it failed (or wasn't found), and '--version' was # passed; such an option is passed most likely to detect whether the # program is present and works. case $2 in --version|--help) exit $st;; esac # Exit code 63 means version mismatch. This often happens when the user # tries to use an ancient version of a tool on a file that requires a # minimum version. if test $st -eq 63; then msg="probably too old" elif test $st -eq 127; then # Program was missing. msg="missing on your system" else # Program was found and executed, but failed. Give up. 
exit $st fi perl_URL=http://www.perl.org/ flex_URL=http://flex.sourceforge.net/ gnu_software_URL=http://www.gnu.org/software program_details () { case $1 in aclocal|automake) echo "The '$1' program is part of the GNU Automake package:" echo "<$gnu_software_URL/automake>" echo "It also requires GNU Autoconf, GNU m4 and Perl in order to run:" echo "<$gnu_software_URL/autoconf>" echo "<$gnu_software_URL/m4/>" echo "<$perl_URL>" ;; autoconf|autom4te|autoheader) echo "The '$1' program is part of the GNU Autoconf package:" echo "<$gnu_software_URL/autoconf/>" echo "It also requires GNU m4 and Perl in order to run:" echo "<$gnu_software_URL/m4/>" echo "<$perl_URL>" ;; esac } give_advice () { # Normalize program name to check for. normalized_program=`echo "$1" | sed ' s/^gnu-//; t s/^gnu//; t s/^g//; t'` printf '%s\n' "'$1' is $msg." configure_deps="'configure.ac' or m4 files included by 'configure.ac'" case $normalized_program in autoconf*) echo "You should only need it if you modified 'configure.ac'," echo "or m4 files included by it." program_details 'autoconf' ;; autoheader*) echo "You should only need it if you modified 'acconfig.h' or" echo "$configure_deps." program_details 'autoheader' ;; automake*) echo "You should only need it if you modified 'Makefile.am' or" echo "$configure_deps." program_details 'automake' ;; aclocal*) echo "You should only need it if you modified 'acinclude.m4' or" echo "$configure_deps." program_details 'aclocal' ;; autom4te*) echo "You might have modified some maintainer files that require" echo "the 'automa4te' program to be rebuilt." program_details 'autom4te' ;; bison*|yacc*) echo "You should only need it if you modified a '.y' file." echo "You may want to install the GNU Bison package:" echo "<$gnu_software_URL/bison/>" ;; lex*|flex*) echo "You should only need it if you modified a '.l' file." echo "You may want to install the Fast Lexical Analyzer package:" echo "<$flex_URL>" ;; help2man*) echo "You should only need it if you modified a dependency" \ "of a man page." echo "You may want to install the GNU Help2man package:" echo "<$gnu_software_URL/help2man/>" ;; makeinfo*) echo "You should only need it if you modified a '.texi' file, or" echo "any other file indirectly affecting the aspect of the manual." echo "You might want to install the Texinfo package:" echo "<$gnu_software_URL/texinfo/>" echo "The spurious makeinfo call might also be the consequence of" echo "using a buggy 'make' (AIX, DU, IRIX), in which case you might" echo "want to install GNU make:" echo "<$gnu_software_URL/make/>" ;; *) echo "You might have modified some files without having the proper" echo "tools for further handling them. Check the 'README' file, it" echo "often tells you about the needed prerequisites for installing" echo "this package. You may also peek at any GNU archive site, in" echo "case some other package contains this missing '$1' program." ;; esac } give_advice "$1" | sed -e '1s/^/WARNING: /' \ -e '2,$s/^/ /' >&2 # Propagate the correct exit status (expected to be 127 for a program # not found, 63 for a program that failed due to version mismatch). 
exit $st # Local variables: # eval: (add-hook 'write-file-hooks 'time-stamp) # time-stamp-start: "scriptversion=" # time-stamp-format: "%:y-%02m-%02d.%02H" # time-stamp-time-zone: "UTC" # time-stamp-end: "; # UTC" # End: globus_gram_job_manager_scripts-7.3/PaxHeaders.20921/JobManager.html0000644000000000000000000000013213765230140023645 xustar000000000000000030 mtime=1607807072.760897731 30 atime=1607807072.712897731 30 ctime=1607807072.924897731 globus_gram_job_manager_scripts-7.3/JobManager.html0000644000372000037200000003264613765230140023566 0ustar00travistravis00000000000000

NAME

Globus::GRAM::JobManager - Base class for all Job Manager scripts

SYNOPSIS

 $manager = new Globus::GRAM::JobManager($job_description);

 $manager->log("Starting new operation");
 $manager->nfssync($fileobj,$createflag);
 $manager->respond($hashref);
 $hashref = $manager->submit();
 $hashref = $manager->poll();
 $hashref = $manager->cancel();
 $hashref = $manager->signal();
 $hashref = $manager->make_scratchdir();
 $hashref = $manager->remove_scratchdir();
 $hashref = $manager->rewrite_urls();
 $hashref = $manager->stage_in();
 $hashref = $manager->stage_out();
 $hashref = $manager->cache_cleanup();
 $hashref = $manager->remote_io_file_create();
 $hashref = $manager->proxy_relocate();
 $hashref = $manager->proxy_update();
 $scalar  = $manager->pipe_out_cmd(@arglist);
 ($stderr, $rc) = $manager->pipe_err_cmd(@arglist);
 $status  = $manager->fork_and_exec_cmd(@arglist);
 $manager->append_path($hash, $variable, $path);
 $scalar = $manager->setup_softenv();

DESCRIPTION

The Globus::GRAM::JobManager module implements the base behavior for a Job Manager script interface. Scheduler-specific job manager scripts must inherit from this module in order to be used by the job manager.

Methods

$manager = Globus::GRAM::JobManager->new($JobDescription)

Each Globus::GRAM::JobManager object is created by calling the constructor with a single argument, a Globus::GRAM::JobDescription object containing the information about the job request which the script will be modifying. Modules which subclass Globus::GRAM::JobManager MUST call the super-class's constructor, as in this code fragment:

     my $proto = shift;
     my $class = ref($proto) || $proto;
     my $self = $class->SUPER::new(@_);

     bless $self, $class;
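
A complete scheduler module built on this pattern might look like the following minimal sketch. The package name Globus::GRAM::JobManager::example is hypothetical; a real module would also override submit(), poll(), and cancel() as described below.

    package Globus::GRAM::JobManager::example;

    use Globus::GRAM::Error;
    use Globus::GRAM::JobState;
    use Globus::GRAM::JobManager;

    our @ISA = qw(Globus::GRAM::JobManager);

    sub new
    {
        my $proto = shift;
        my $class = ref($proto) || $proto;
        my $self = $class->SUPER::new(@_);

        # Scheduler-specific initialization may be added here.
        bless $self, $class;

        return $self;
    }

    1;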
$manager->log($string)

Log a message to the job manager log file. The message is preceded by a timestamp.

$manager->nfssync($object,$create)

Send an NFS update by touching the file (or directory) in question. If $create is true, a file will be created. If it is false, $object will not be created.

$manager->respond($message)

Send a response to the job manager program. The response may be either a hash reference consisting of (variable, value) pairs, which will be returned to the job manager, or an already formatted string. This only needs to be called directly by a job manager implementation when the script wants to send a partial response while processing one of the scheduler interface methods (for example, to indicate that a file has been staged).

The valid keys for a response are defined in the RESPONSES section.
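
For example, a staging implementation could report progress on each file with a call like the following sketch; the URL, local path, and exact formatting of the STAGED_IN value (a "(URL, path) pair" string, see RESPONSES) are illustrative:

    my ($url, $local_path) = ('gsiftp://host.example.org/input.dat', 'input.dat');
    $manager->respond({ STAGED_IN => "$url $local_path" });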

$manager->submit()

Submit a job request to the scheduler. The default implementation returns with the Globus::GRAM::Error::UNIMPLEMENTED error. Scheduler specific subclasses should reimplement this method to submit the job to the scheduler.

A scheduler which implements this method should return a hash reference containing a scheduler-specific job identifier as the value of the hash's JOB_ID key and, optionally, a GRAM job state as the value of the hash's JOB_STATE key if the job submission was successful; otherwise a Globus::GRAM::Error value should be returned. The job state values are defined in the Globus::GRAM::JobState module. The job parameters (as found in the job RSL) are available in the Globus::GRAM::JobDescription object in $self->{JobDescription}.

For example:

    return {JOB_STATE => Globus::GRAM::JobState::PENDING,
            JOB_ID => $job_id};
$manager->poll()

Poll a job's status. The default implementation returns with the Globus::GRAM::Error::UNIMPLEMENTED error. Scheduler specific subclasses should reimplement this method to poll the scheduler.

A scheduler which implements this method should return a hash reference containing the JOB_STATE value. The job's ID can be accessed by calling the $self->{JobDescription}->jobid() method.
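
A minimal poll() override might look like the following sketch, which assumes a hypothetical scheduler query command named example_job_status that prints a one-word status for a given job identifier:

    sub poll
    {
        my $self = shift;
        my $job_id = $self->{JobDescription}->jobid();

        # Ask the (hypothetical) scheduler for the job's status.
        my $status = $self->pipe_out_cmd('example_job_status', $job_id);

        if (! defined($status) || $status eq '')
        {
            # The scheduler no longer knows about the job; assume it finished.
            return { JOB_STATE => Globus::GRAM::JobState::DONE };
        }
        elsif ($status eq 'RUNNING')
        {
            return { JOB_STATE => Globus::GRAM::JobState::ACTIVE };
        }
        else
        {
            return { JOB_STATE => Globus::GRAM::JobState::PENDING };
        }
    }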

$manager->cancel()

Cancel a job. The default implementation returns with the Globus::GRAM::Error::UNIMPLEMENTED error. Scheduler specific subclasses should reimplement this method to remove the job from the scheduler.

A scheduler which implements this method should return a hash reference containing the JOB_STATE value. The job's ID can be accessed by calling the $self->{JobDescription}->jobid() method.
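
A cancel() override typically invokes the scheduler's removal command and reports a terminal state, as in this sketch; example_job_cancel is a hypothetical command, and whether FAILED or DONE is the appropriate resulting state depends on the scheduler:

    sub cancel
    {
        my $self = shift;
        my $job_id = $self->{JobDescription}->jobid();

        # Ask the (hypothetical) scheduler to remove the job.
        $self->fork_and_exec_cmd('example_job_cancel', $job_id);

        return { JOB_STATE => Globus::GRAM::JobState::FAILED };
    }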

$manager->signal()

Signal a job. The default implementation returns with the Globus::GRAM::Error::UNIMPLEMENTED error. Scheduler specific subclasses should reimplement this method to deliver the signal to the job via the scheduler. The JobManager module can determine the job's ID, the signal number, and the (optional) signal arguments from the Job Description by calling its job_id(), signal(), and signal_arg() methods, respectively.

Depending on the signal, it may be appropriate for the JobManager object to return a hash reference containing a JOB_STATE update.

$manager->make_scratchdir()

Create a scratch directory for a job. The scratch directory location is based on the JobDescription's scratch_dir_base() and scratch_dir() methods.

If the scratch_dir() value is a relative path, then the directory will be created as a subdirectory of scratch_dir_base()/scratch_dir(); otherwise, it will be created as a subdirectory of scratch_dir(). This method returns a hash reference mapping SCRATCH_DIR to the absolute path of the newly created scratch directory if successful.

$manager->remove_scratchdir()

Delete a job's scratch directory. All files and subdirectories of the JobDescription's scratch_directory() will be deleted.

$manager->file_cleanup()

Delete some job-related files. All files listed in the JobDescription's file_cleanup() array will be deleted.

$manager->rewrite_urls()

Looks up URLs listed in the JobDescription's stdin() and executable(), and replaces them with paths to locally cached copies.

$manager->stage_in()

Stage input files needed for the job from remote storage. The files to be staged are defined by the array of [URL, path] pairs in the job description's file_stage_in() and file_stage_in_shared() methods. The Globus::GRAM::JobManager module provides an implementation of this functionality using the globus-url-copy and globus-gass-cache programs. Files which are staged in are not automatically removed when the job terminates.

This function returns intermediate responses using the Globus::GRAM::JobManager::respond() method to let the job manager know when each individual file has been staged.
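
For reference, a scheduler module can examine the staging pairs itself using only the documented accessors, as in this sketch:

    foreach my $pair ($self->{JobDescription}->file_stage_in())
    {
        my ($url, $path) = @{$pair};
        $self->log("job requests staging of $url to $path");
    }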

$manager->stage_out()

Stage output files generated by this job to remote storage. The files to be staged are defined by the array of [URL, destination] pairs in the job description's file_stage_out() method. The Globus::GRAM::JobManager module provides an implementation of this functionality using the globus-url-copy program. Files which are staged out are not removed by this method.

$manager->cache_cleanup()

Clean up references in the GASS cache which match this job's cache tag.

$manager->remote_io_file_create()

Create the remote I/O file in the job dir which will contain the remote_io_url RSL attribute's value.

$manager->proxy_relocate()

Relocate the delegated proxy for job execution. Job Managers need to override the default if they intend to relocate the proxy into some common file system other than the cache. The job manager program does not depend on the new location of the proxy. Job Manager modules must not remove the default proxy.

$hashref = $manager->proxy_update();
$manager->append_path($ref, $var, $path)

Append $path to the value of $ref->{$var}, dealing with the case where $ref->{$var} is not yet defined.
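
For example, extending the PATH entry of an environment hash before building a submit script (the hash contents and the appended directory are illustrative, and the usual colon separator is assumed):

    my %env = ( PATH => '/usr/bin:/bin' );
    $manager->append_path(\%env, 'PATH', '/usr/local/example/bin');
    # $env{PATH} now has '/usr/local/example/bin' appended.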

$manager->pipe_out_cmd(@arg)

Create a new process to run the first argument as an application with the remaining arguments (which may be empty). No shell metacharacters are evaluated, avoiding a shell invocation. Stderr is redirected to /dev/null, and stdout is captured by the parent process and returned as the result. In list context, all lines are returned; in scalar context, only the first line is returned. The line termination character is already cut off. Use this function as a more efficient alternative to backticks if you do not need shell metacharacter evaluation.

Caution: This function deviates in two ways from regular backticks. Firstly, it chomps the line terminator from the output. Secondly, in scalar context it returns only the first line instead of a multiline concatenated string. As with regular backticks, the result may be undefined in scalar context if no result exists.

A child error code with an exit code of 127 indicates that the application could not be run. The scalar result returned by this function is usually undef in this case.
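
For example (the commands and arguments are illustrative):

    # Scalar context: the first line of output, with the newline removed.
    my $hostname = $manager->pipe_out_cmd('/bin/hostname');

    # List context: all output lines.
    my @entries = $manager->pipe_out_cmd('/bin/ls', '-1', '/tmp');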

($stderr, $rc) = $manager->pipe_err_cmd(@arg)

Create a new process to run the first argument as an application with the remaining arguments (which may be empty). No shell metacharacters are evaluated, avoiding a shell invocation.

This method returns a list of two items, the standard error of the program, and the exit code of the program. If the error code is 127, then the application could not be run. Standard output is discarded.
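
For example, checking whether a hypothetical scheduler command could be run at all:

    my ($stderr, $rc) = $manager->pipe_err_cmd('example_submit', 'job.submit');
    if ($rc == 127)
    {
        # The command itself could not be executed.
        $manager->log("unable to run example_submit: $stderr");
    }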

$manager->fork_and_exec_cmd(@arg)

Fork off a child to run the first argument in the list. Remaining arguments will be passed, but shell interpolation is avoided. The signals SIGINT and SIGQUIT are ignored in the child process. Stdout is appended to /dev/null, and stderr is duplicated from stdout. The parent waits for the child to finish and returns the value of the CHILD_ERROR variable as its result. Use this function as a more efficient system() call if you do not need shell metacharacter evaluation.

Note that the inability to execute the program will result in a status code of 127.
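
For example (the command and directory are illustrative):

    my $status = $manager->fork_and_exec_cmd('/bin/mkdir', '-p', '/tmp/example_scratch');
    if ($status != 0)
    {
        $manager->log("mkdir failed: CHILD_ERROR was $status");
    }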

$manager->job_dir()

Return the temporary directory used to store job-related files which do not need file caching.

$manager->setup_softenv()

Either add a line to the specified command script file handle to load the user's default SoftEnv configuration, or create a custom SoftEnv script and add commands to the specified command script file handle to load it.

RESPONSES

When returning from a job interface method, or when sending an intermediate response via the response() method, the following hash keys are valid:

  • JOB_STATE

    An integer job state value. These are enumerated in the Globus::GRAM::JobState module.

  • ERROR

    An integer error code. These are enumerated in the Globus::GRAM::Error module.

  • JOB_ID

    A string containing a job identifier, which can be used to poll, cancel, or signal a job in progress. This response should only be returned by the submit method.

  • SCRATCH_DIR

    A string containing the path to a newly-created scratch directory. This response should only be returned by the make_scratchdir method.

  • STAGED_IN

    A string containing the (URL, path) pair for a file which has now been staged in. This response should only be returned by the stage_in method.

  • STAGED_IN_SHARED

    A string containing the (URL, path) pair for a file which has now been staged in and symlinked from the cache. This response should only be returned by the stage_in_shared method.

  • STAGED_OUT

    A string containing the (path, URL) pair for a file which has now been staged out by the script. This response should only be returned by the stage_out method.

globus_gram_job_manager_scripts-7.3/PaxHeaders.20921/globus-gram-job-manager-scripts-uninstalled.pc.0000644000000000000000000000013213765226605031773 xustar000000000000000030 mtime=1607806341.384897731 30 atime=1607806957.692897731 30 ctime=1607807072.916897731 globus_gram_job_manager_scripts-7.3/globus-gram-job-manager-scripts-uninstalled.pc.in0000664000372000037200000000036613765226605032237 0ustar00travistravis00000000000000prefix=@prefix@ exec_prefix=@exec_prefix@ libdir=@libdir@ includedir=@includedir@ path=@abs_builddir@ perl5lib=@abs_builddir@ Name: globus-gram-job-manager-scripts Description: Grid Community Toolkit - GRAM Job ManagerScripts Version: @VERSION@ globus_gram_job_manager_scripts-7.3/PaxHeaders.20921/JobDescription.pm0000644000000000000000000000013213765226605024241 xustar000000000000000030 mtime=1607806341.384897731 30 atime=1607807072.796897731 30 ctime=1607807072.920897731 globus_gram_job_manager_scripts-7.3/JobDescription.pm0000664000372000037200000004433513765226605024162 0ustar00travistravis00000000000000# # Globus::GRAM::JobDescription # # CVS Information # $Source$ # $Date$ # $Revision$ # $Author$ use Globus::Core::Paths; =head1 NAME Globus::GRAM::JobDescription - GRAM Job Description Globus::GRAM::DefaultHandlingJobDescription - GRAM Job Description with relative path handling =head1 SYNOPSIS use Globus::GRAM::JobDescription; $hash = { executable => [ '/bin/echo' ], arguments => [ 'hello' ] }; $description = new Globus::GRAM::JobDescription($filename); $description = new Globus::GRAM::JobDescription($hash); $executable = $description->executable(); $description->add($new_attribute, $new_value); $description->save(); $description->save($filename); $description->print_recursive($file_handle); =head1 DESCRIPTION This object contains the parameters of a job request in a simple object wrapper. The object may be queried to determine the value of any RSL parameter, may be updated with new parameters, and may be saved in the filesystem for later use. =head2 Methods =over 4 =cut package Globus::GRAM::JobDescription; =item new Globus::GRAM::JobDescription(I<$filename>) A JobDescription is constructed from a file consisting of a Perl hash of parameter => array mappings. Every value in the Job Description is stored internally as an array, even single literals, similar to the way an RSL tree is parsed in C. An example of such a file is $description = { executable => [ '/bin/echo' ], arguments => [ 'hello', 'world' ], environment => [ [ 'GLOBUS_GRAM_JOB_CONTACT', 'https://globus.org:1234/2345/4332' ] ] }; which corresponds to the rsl fragment &(executable = /bin/echo) (arguments = hello world) (environment = (GLOBUS_GRAM_JOB_CONTACT 'https://globus.org:1234/2345/4332') ) When the library_path RSL attribute is specified, this object modifies the environment RSL attribute value to append its value to any system specific variables. 
=cut sub new { my $proto = shift; my $class = ref($proto) || $proto; my $desc = shift; my $self = {}; if (defined ($desc)) { if ( ref ( $desc ) eq "HASH" ) { foreach my $Key ( keys %{$desc} ) { $self->{$Key} = $desc->{$Key}; } } else { my $desc_fn = $desc; $self = require "$desc_fn"; $self->{_description_file} = $desc_fn; } } bless $self, $class; if ($self->expand_globus_home()) { my $home = (getpwuid($<))[7]; foreach my $key (keys %{$self}) { if ($key =~ m/^[^_]/) { my $arrayref = $self->{$key}; for ($i = 0; $i < scalar(@{$arrayref}); $i++) { $arrayref->[$i] =~ s/\$\{GLOBUS_USER_HOME\}/$home/g; } } } } if ($self->expand_globus_location()) { my $home; if (exists $ENV{GLOBUS_LOCATION}) { $home = $ENV{GLOBUS_LOCATION}; } else { $home = $Globus::Core::Paths::exec_prefix; } foreach my $key (keys %{$self}) { if ($key =~ m/^[^_]/) { my $arrayref = $self->{$key}; for ($i = 0; $i < scalar(@{$arrayref}); $i++) { $arrayref->[$i] =~ s/\$\{GLOBUS_LOCATION\}/$home/g; } } } } $self->fix_library_path_environment(); return $self; } =item $description->I('name', I<$value>); Add a parameter to a job description. The parameter will be normalized internally so that the access methods described below will work with this new parameter. As an example, $description->add('new_attribute', $new_value) will create a new attribute in the JobDescription, which can be accessed by calling the I<$description->new_attribute>() method. =cut sub add { my $self = shift; my $attr = shift; my $value = shift; $attr =~ s/_//g; $attr = lc($attr); if ( ref($value) eq 'ARRAY' ) { $self->{$attr} = $value; } else { $self->{$attr} = [ $value ]; } } =item I<$value> $description->I('name'); Get a parameter from a job description. As an example, $description->get('attribute') will return the appropriate attribute in the JobDescription by name. =cut sub get { my $self = shift; my $attr = shift; $attr =~ s/_//g; $attr = lc($attr); return $self->{$attr}; } =item $description->I([$filename]) Save the JobDescription, including any added parameters, to the file named by $filename if present, or replacing the file used in constructing the object. =cut sub save { my $self = shift; my $filename = shift || "$self->{_description_file}.new"; local(*OUT); # protect if ( open( OUT, '>' . $filename ) ) { print OUT '$description = {', "\n"; foreach ( keys %{$self} ) { print OUT ' \'', $_, '\' => '; $self->print_recursive( \*OUT, $self->{$_} ); print OUT ",\n"; } print OUT "};\n"; close(OUT); } else { # FIXME: what shall we do, if we cannot open the file? } if ( exists($self->{_description_file}) ) { if ( $filename eq "$self->{_description_file}.new" ) { rename("$self->{_description_file}.new", $self->{_description_file}); } } return 0; } =item $description->I($file_handle) Write the value of the job description object to the file handle specified in the argument list. 
=cut sub print_recursive { my $self = shift; my $fh = shift; # with ..::File, \*FILE or *FILE{IO} my $value = shift; my $first = 1; if ( ref($value) eq 'SCALAR' ) { print $fh $value; } elsif(ref($value) eq 'ARRAY') { print $fh '[ '; foreach (@{$value}) { print $fh ', ' if (!$first); $first = 0; $self->print_recursive($fh, $_); } print $fh ' ]'; } elsif(ref($value) eq 'HASH') { print $fh '('; foreach (keys %{$value}) { print $fh ', ' if (!$first); $first = 0; print $fh "'$_' => "; $self->print_recursive($fh, $value->{$_}); } print $fh ')'; } elsif(!ref($value)) { $value =~ s|'|\\'|g; print $fh "'$value'"; } return; } =item $description->I() For any parameter defined in the JobDescription can be accessed by calling the method named by the parameter. The method names are automatically created when the JobDescription is created, and may be invoked with arbitrary SillyCaps or underscores. That is, the parameter gram_myjob may be accessed by the GramMyJob, grammyjob, or gram_my_job method names (and others). If the attributes does not in this object, then undef will be returned. In a list context, this returns the list of values associated with an attribute. In a scalar context, if the attribute's value consist of a single literal, then that literal will be returned, otherwise undef will be returned. For example, from a JobDescription called $d constructed from a description file containing { executable => [ '/bin/echo' ], arguments => [ 'hello', 'world' ] } The following will hold: $executable = $d->executable() # '/bin/echo' $arguments = $d->arguments() # undef @executable = $d->executable() # ('/bin/echo') @arguments = $d->arguments() # ('hello', 'world') $not_present = $d->not_present() # undef @not_present = $d->not_present() # () To test for existence of a value: @not_present = $d->not_present() print "Not defined\n" if(!defined($not_present[0])); =cut sub trim($$) { local($_) = shift; # the value my $preset = shift; # hash ref my $ch = substr($_,0,1); if ( $ch eq '"' ) { # value in dquotes $_ = substr($_,1,-1); } elsif ( $ch eq '\'' ) { # value in squotes, no substitutions return substr($_,1,-1); } else { # unquoted value, trim whitespaces s/^\s+//; s/\s+$//; } if ( ref($preset) eq 'HASH' ) { # substitute $VAR variables, or keep $VAR s/\$(\w+)/(exists $preset->{$1} ? $preset->{$1} : "\$$1")/egx; # substitute ${VAR} variables, or keep ${VAR} s/\$\{([^}]+)\}/(exists $preset->{$1} ? $preset->{$1} : "\${$1}")/egx; } # done return $_; } # Simple helper function to process a single line from one of the OSG # attributes files into a key-value pair. Returns (undef, undef) if the # line is not valid. sub parse_osg_attributes_line { $_ = shift; s/[\r\n]*$//; # safe chomp s/\#.*$//; # remove comments s/^\s+//; # remove initial whitespace s/\s+$//; # remove trailing whitespace # Reject lines that are empty, begin with 'export', or lack '=' if (($_ eq '') or m/^export/ or (index($_, '=') == -1)) { return (undef, undef); } # split into only two parts at the first equals sign # $k will become the variable name, and $v the raw value return split(/=/, $_, 2); } # We override the autohandler for environment so we can tack on # stuff from osg-attributes.conf sub environment { my $self = shift; local(*INFO); # return if missing, part 1 return ( wantarray ? 
() : undef ) unless ref $self; # slurp gridinfo file my %result = (); # map key to value if ( exists $self->{'_osg_info'} && ref($self->{'_osg_info'}) eq 'HASH' ) { # use instance knowledge - avoid reading the file again %result = %{ $self->{'_osg_info'} }; } else { my %preset = ( %ENV ); # as meager as it may be # PATH is no longer in the present environment, now that we don't use xinetd $result{"PATH"} = "/bin:/usr/bin"; # no previous knowledge, need to read the file my $fn = "/var/lib/osg/osg-job-environment.conf"; if ( open( INFO, "<$fn" ) ) { my ($k,$v); while ( ) { ($k,$v) = parse_osg_attributes_line($_); next unless defined $k; # substitute and unquote the value, remember it $result{$k} = $preset{$k} = trim( $v, \%preset ); } close INFO; } # Now do the same thing for the "local" file my $local_fn = "/var/lib/osg/osg-local-job-environment.conf"; if ( open( INFO, "<$local_fn" ) ) { my ($k,$v); while ( ) { ($k,$v) = parse_osg_attributes_line($_); next unless defined $k; # substitute and unquote the value, remember it $result{$k} = $preset{$k} = trim( $v, \%preset ); } close INFO; } # remember for next invocation in this instance # Note: If the file was unreadible, this is negative caching. $self->{'_osg_info'} = { %result }; } # return if missing, part 2 # this has been rewritten to not include job environments, if missing # we still need to return the osg-attributes environment, though. if ( exists $self->{environment} ) { # merge with job/user environment (higher prio) foreach my $t ( @{$self->{environment}} ) { if(ref($t) && scalar(@$t) == 2) { if (exists $result{$t->[0]}) { delete($result{$t->[0]}); } } } } # Add our non-overridden variables to the environment list foreach my $key ( keys %result ) { push( @{$self->{environment}}, [ $key, $result{$key} ] ); } # return in a way requested by caller if ( wantarray ) { return @{$self->{environment}}; } else { my @result = @{$self->{environment}}; if ( @{$self->{environment}} == 1 && ! ref(${@{$self->environment}}[0]) ) { return $result[0]; } else { return undef; } } } sub AUTOLOAD { use vars qw($AUTOLOAD); my $self = shift; my $name = $AUTOLOAD; $name =~ s/.*://; $name =~ s/_//g; $name = lc($name); goto &environment if $name eq "environment"; if((! ref($self)) ||(! exists($self->{$name}))) { if(wantarray) { return (); } else { return undef; } } if(wantarray) { # Return a list containing the contents of the value array for # this attribute. # This makes things like $description->environment() act as expected. return @{$self->{$name}}; } elsif(scalar(@{$self->{$name}}) == 1 && !ref($self->{$name}[0])) { # If there is only a single value in the value array for this # attribute, return that value # This makes things like $description->directory() act as expected. return @{$self->{$name}}[0]; } else { return undef; } } # Internal method to merge the library_path RSL attribute and any values in the # environment RSL attribute which explicitly name system library path variables. # The result will be modifications to the environment RSL attribute value # with the library_path values appended to any existing system-specific library # path settings in the original RSL. 
For example # if we found # &(environment = (LD_LIBRARY_PATH foo)) # (library_path = bar) # in the RSL, and LD_LIBRARY_PATH was one of the system-specific library paths # for this OS, we'll modify the RSL to be # &(environment = (LD_LIBRARY_PATH foo:bar)) # (library_path = bar) # # The $library_map values are mostly based on # http://www.fortran-2000.com/ArnaudRecipes/sharedlib.html # and also LD_LIBRARY_PATH for some popular BSDs sub fix_library_path_environment { my $self = shift; my @environment = $self->environment(); my $library_map = { 'linux' => [ 'LD_LIBRARY_PATH'], 'hpux' => [ 'SHLIB_PATH', 'LD_LIBRARY_PATH' ], 'solaris' => [ 'LD_LIBRARY_PATH', 'LD_LIBRARY_PATH_64' ], 'aix' => [ 'LIBPATH' ], 'irix' => [ 'LD_LIBRARY_PATH', 'LD_LIBRARYN32_PATH', 'LD_LIBRARY64_PATH' ], 'darwin' => [ 'DYLD_LIBRARY_PATH' ], 'freebsd' => [ 'LD_LIBRARY_PATH' ], 'openbsd' => [ 'LD_LIBRARY_PATH' ] }; my $library_path = join(':', $self->library_path()); # Only bother doing anything if the library_path RSL attribute is # present, and we know something about how the OS finds dynamic libraries # $^O is The name of the operating system under which this copy of Perl # was built if ($library_path ne '' && exists($library_map->{$^O})) { foreach my $var (@{$library_map->{$^O}}) { # environment is an list of [ $name, $value ] pairs. This pulls # out the value that matches the current OS-specific envvar name my @libref = grep { $_->[0] eq $var } @environment; if (exists $libref[0]) { # user specified both environment=($var ...) and # library_path=$library_path so we'll append $library_path # to the corresponding environment variable definition $libref[0]->[1] .= ":$library_path"; } else { # user didn't specify both library_path and # environment=($var $library_path), so we just add it to the # environment push(@environment, [$var, $library_path]); } } # @environment is a list of references so modifications above will # modify the RSL; however, if we add new references (the else case # above), they won't be in the list in this object. $self->add('environment', \@environment); } } =back =cut package Globus::GRAM::DefaultHandlingJobDescription; our @ISA = qw(Globus::GRAM::JobDescription); sub directory { my $self = shift; my $dir = $self->SUPER::directory(); if ($dir =~ m|^[^/]|) { $dir = "$ENV{HOME}/$dir"; } return $dir; } sub executable { my $self = shift; my $exe = $self->SUPER::executable(); if (ref($exe) || $exe =~ m|://|) { return $exe; } if ($exe =~ m|^[^/]|) { $exe = $self->directory() . "/$exe"; } return $exe; } sub stdin { my $self = shift; my $stdin = $self->SUPER::stdin(); if (ref $stdin || $stdin =~ m|://|) { return $stdin; } if ($stdin =~ m|^[^/]|) { $stdin = $self->directory() . "/$stdin"; } return $stdin; } sub stdout { my $self = shift; my @stdout = $self->SUPER::stdout(); if (scalar(@stdout) > 1 || ref($stdout[0])) { return @stdout; } my $stdout = $stdout[0]; if (ref $stdout || $stdout =~ m|://|) { return $stdout; } if ($stdout =~ m|^[^/]|) { $stdout = $self->directory() . "/$stdout"; } return $stdout; } sub stderr { my $self = shift; my @stderr = $self->SUPER::stderr(); if (scalar(@stderr) > 1 || ref($stderr[0])) { return @stderr; } my $stderr = $stderr[0]; if (ref $stderr || $stderr =~ m|://|) { return $stderr; } if ($stderr =~ m|^[^/]|) { $stderr = $self->directory() . "/$stderr"; } return $stderr; } sub max_cpu_time { my $self = shift; my $max_cpu_time = $self->SUPER::max_cpu_time(); if (! $max_cpu_time) { $max_cpu_time = $self->max_time(); } $max_cpu_time = 0 if (! 
$max_cpu_time); return $max_cpu_time; } sub max_wall_time { my $self = shift; my $max_wall_time = $self->SUPER::max_wall_time(); if (! $max_wall_time) { $max_wall_time = $self->max_time(); } $max_wall_time = 0 if (! $max_wall_time); return $max_wall_time; } sub get($$) { my $self = shift; my $name = shift; $name =~ s/_//g; $name = lc($name); if ($name eq 'directory') { return $self->directory(); } elsif ($name eq 'executable') { return $self->executable(); } elsif ($name eq 'stdin') { return $self->stdin(); } elsif ($name eq 'stdout') { return $self->stdout(); } elsif ($name eq 'stderr') { return $self->stderr(); } elsif ($name eq 'max_cpu_time') { return $self->max_cpu_time(); } elsif ($name eq 'max_wall_time') { return $self->max_wall_time(); } else { return $self->SUPER::get($name); } } 1; __END__ # vim: filetype=perl : globus_gram_job_manager_scripts-7.3/PaxHeaders.20921/JobManager.3pm0000644000000000000000000000013213765230140023400 xustar000000000000000030 mtime=1607807072.852897731 30 atime=1607807072.804897731 30 ctime=1607807072.924897731 globus_gram_job_manager_scripts-7.3/JobManager.3pm0000644000372000037200000004454713765230140023324 0ustar00travistravis00000000000000.\" Automatically generated by Pod::Man 2.27 (Pod::Simple 3.28) .\" .\" Standard preamble: .\" ======================================================================== .de Sp \" Vertical space (when we can't use .PP) .if t .sp .5v .if n .sp .. .de Vb \" Begin verbatim text .ft CW .nf .ne \\$1 .. .de Ve \" End verbatim text .ft R .fi .. .\" Set up some character translations and predefined strings. \*(-- will .\" give an unbreakable dash, \*(PI will give pi, \*(L" will give a left .\" double quote, and \*(R" will give a right double quote. \*(C+ will .\" give a nicer C++. Capital omega is used to do unbreakable dashes and .\" therefore won't be available. \*(C` and \*(C' expand to `' in nroff, .\" nothing in troff, for use with C<>. .tr \(*W- .ds C+ C\v'-.1v'\h'-1p'\s-2+\h'-1p'+\s0\v'.1v'\h'-1p' .ie n \{\ . ds -- \(*W- . ds PI pi . if (\n(.H=4u)&(1m=24u) .ds -- \(*W\h'-12u'\(*W\h'-12u'-\" diablo 10 pitch . if (\n(.H=4u)&(1m=20u) .ds -- \(*W\h'-12u'\(*W\h'-8u'-\" diablo 12 pitch . ds L" "" . ds R" "" . ds C` "" . ds C' "" 'br\} .el\{\ . ds -- \|\(em\| . ds PI \(*p . ds L" `` . ds R" '' . ds C` . ds C' 'br\} .\" .\" Escape single quotes in literal strings from groff's Unicode transform. .ie \n(.g .ds Aq \(aq .el .ds Aq ' .\" .\" If the F register is turned on, we'll generate index entries on stderr for .\" titles (.TH), headers (.SH), subsections (.SS), items (.Ip), and index .\" entries marked with X<> in POD. Of course, you'll have to process the .\" output yourself in some meaningful fashion. .\" .\" Avoid warning from groff about undefined register 'F'. .de IX .. .nr rF 0 .if \n(.g .if rF .nr rF 1 .if (\n(rF:(\n(.g==0)) \{ . if \nF \{ . de IX . tm Index:\\$1\t\\n%\t"\\$2" .. . if !\nF==2 \{ . nr % 0 . nr F 2 . \} . \} .\} .rr rF .\" .\" Accent mark definitions (@(#)ms.acc 1.5 88/02/08 SMI; from UCB 4.2). .\" Fear. Run. Save yourself. No user-serviceable parts. . \" fudge factors for nroff and troff .if n \{\ . ds #H 0 . ds #V .8m . ds #F .3m . ds #[ \f1 . ds #] \fP .\} .if t \{\ . ds #H ((1u-(\\\\n(.fu%2u))*.13m) . ds #V .6m . ds #F 0 . ds #[ \& . ds #] \& .\} . \" simple accents for nroff and troff .if n \{\ . ds ' \& . ds ` \& . ds ^ \& . ds , \& . ds ~ ~ . ds / .\} .if t \{\ . ds ' \\k:\h'-(\\n(.wu*8/10-\*(#H)'\'\h"|\\n:u" . ds ` \\k:\h'-(\\n(.wu*8/10-\*(#H)'\`\h'|\\n:u' . 
ds ^ \\k:\h'-(\\n(.wu*10/11-\*(#H)'^\h'|\\n:u' . ds , \\k:\h'-(\\n(.wu*8/10)',\h'|\\n:u' . ds ~ \\k:\h'-(\\n(.wu-\*(#H-.1m)'~\h'|\\n:u' . ds / \\k:\h'-(\\n(.wu*8/10-\*(#H)'\z\(sl\h'|\\n:u' .\} . \" troff and (daisy-wheel) nroff accents .ds : \\k:\h'-(\\n(.wu*8/10-\*(#H+.1m+\*(#F)'\v'-\*(#V'\z.\h'.2m+\*(#F'.\h'|\\n:u'\v'\*(#V' .ds 8 \h'\*(#H'\(*b\h'-\*(#H' .ds o \\k:\h'-(\\n(.wu+\w'\(de'u-\*(#H)/2u'\v'-.3n'\*(#[\z\(de\v'.3n'\h'|\\n:u'\*(#] .ds d- \h'\*(#H'\(pd\h'-\w'~'u'\v'-.25m'\f2\(hy\fP\v'.25m'\h'-\*(#H' .ds D- D\\k:\h'-\w'D'u'\v'-.11m'\z\(hy\v'.11m'\h'|\\n:u' .ds th \*(#[\v'.3m'\s+1I\s-1\v'-.3m'\h'-(\w'I'u*2/3)'\s-1o\s+1\*(#] .ds Th \*(#[\s+2I\s-2\h'-\w'I'u*3/5'\v'-.3m'o\v'.3m'\*(#] .ds ae a\h'-(\w'a'u*4/10)'e .ds Ae A\h'-(\w'A'u*4/10)'E . \" corrections for vroff .if v .ds ~ \\k:\h'-(\\n(.wu*9/10-\*(#H)'\s-2\u~\d\s+2\h'|\\n:u' .if v .ds ^ \\k:\h'-(\\n(.wu*10/11-\*(#H)'\v'-.4m'^\v'.4m'\h'|\\n:u' . \" for low resolution devices (crt and lpr) .if \n(.H>23 .if \n(.V>19 \ \{\ . ds : e . ds 8 ss . ds o a . ds d- d\h'-1'\(ga . ds D- D\h'-1'\(hy . ds th \o'bp' . ds Th \o'LP' . ds ae ae . ds Ae AE .\} .rm #[ #] #H #V #F C .\" ======================================================================== .\" .IX Title "JobManager 3pm" .TH JobManager 3pm "2020-12-12" "perl v5.16.3" "User Contributed Perl Documentation" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l .nh .SH "NAME" Globus::GRAM::JobManager \- Base class for all Job Manager scripts .SH "SYNOPSIS" .IX Header "SYNOPSIS" .Vb 1 \& $manager = new Globus::GRAM::JobManager($job_description); \& \& $manager\->log("Starting new operation"); \& $manager\->nfssync($fileobj,$createflag); \& $manager\->respond($hashref); \& $hashref = $manager\->submit(); \& $hashref = $manager\->poll(); \& $hashref = $manager\->cancel(); \& $hashref = $manager\->signal(); \& $hashref = $manager\->make_scratchdir(); \& $hashref = $manager\->remove_scratchdir(); \& $hashref = $manager\->rewrite_urls(); \& $hashref = $manager\->stage_in(); \& $hashref = $manager\->stage_out(); \& $hashref = $manager\->cache_cleanup(); \& $hashref = $manager\->remote_io_file_create(); \& $hashref = $manager\->proxy_relocate(); \& $hashref = $manager\->proxy_update(); \& $scalar = $manager\->pipe_out_cmd(@arglist); \& ($stderr, $rc) = $manager\->pipe_err_cmd(@arglist); \& $status = $manager\->fork_and_exec_cmd(@arglist); \& $manager\->append_path($hash, $variable, $path); \& $scalar = $manager\->setup_softenv(); .Ve .SH "DESCRIPTION" .IX Header "DESCRIPTION" The Globus::GRAM::JobManager module implements the base behavior for a Job Manager script interface. Scheduler-specific job manager scripts must inherit from this module in order to be used by the job manager. .SS "Methods" .IX Subsection "Methods" .ie n .IP "$manager = Globus::GRAM::JobManager\->new($JobDescription)" 4 .el .IP "\f(CW$manager\fR = Globus::GRAM::JobManager\->new($JobDescription)" 4 .IX Item "$manager = Globus::GRAM::JobManager->new($JobDescription)" Each Globus::GRAM::JobManager object is created by calling the constructor with a single argument, a Globus::GRAM::JobDescription object containing the information about the job request which the script will be modifying. 
Modules which subclass Globus::GRAM::JobManager \s-1MUST\s0 call the super-class's constructor, as in this code fragment: .Sp .Vb 3 \& my $proto = shift; \& my $class = ref($proto) || $proto; \& my $self = $class\->SUPER::new(@_); \& \& bless $self, $class; .Ve .ie n .IP "$manager\->log($string)" 4 .el .IP "\f(CW$manager\fR\->log($string)" 4 .IX Item "$manager->log($string)" Log a message to the job manager log file. The message is preceded by a timestamp. .ie n .IP "$manager\->nfssync($object,$create)" 4 .el .IP "\f(CW$manager\fR\->nfssync($object,$create)" 4 .IX Item "$manager->nfssync($object,$create)" Send an \s-1NFS\s0 update by touching the file (or directory) in question. If the \&\f(CW$create\fR is true, a file will be created. If it is false, the \f(CW$object\fR will not be created. .ie n .IP "$manager\->respond($message)" 4 .el .IP "\f(CW$manager\fR\->respond($message)" 4 .IX Item "$manager->respond($message)" Send a response to the job manager program. The response may either be a hash reference consisting of a hash of (variable, value) pairs, which will be returned to the job manager, or an already formatted string. This only needs to be directly called by a job manager implementation when the script wants to send a partial response while processing one of the scheduler interface methods (for example, to indicate that a file has been staged). .Sp The valid keys for a response are defined in the \s-1RESPONSES\s0 section. .ie n .IP "$manager\->\fIsubmit()\fR" 4 .el .IP "\f(CW$manager\fR\->\fIsubmit()\fR" 4 .IX Item "$manager->submit()" Submit a job request to the scheduler. The default implementation returns with the Globus::GRAM::Error::UNIMPLEMENTED error. Scheduler specific subclasses should reimplement this method to submit the job to the scheduler. .Sp A scheduler which implements this method should return a hash reference containing a scheduler-specific job identifier as the value of the hash's \&\s-1JOB_ID\s0 key, and optionally, the a \s-1GRAM\s0 job state as the value of the hash's \&\s-1JOB_STATE\s0 key if the job submission was successful; otherwise a Globus::GRAM::Error value should be returned. The job state values are defined in the Globus::GRAM::JobState module. The job parameters (as found in the job rsl) are defined in Globus::GRAM::Jobdescription object in \f(CW$self\fR\->{JobDescription}. .Sp For example: .Sp .Vb 2 \& return {JOB_STATE => Globus::GRAM::JobState::PENDING, \& JOB_ID => $job_id}; .Ve .ie n .IP "$manager\->\fIpoll()\fR" 4 .el .IP "\f(CW$manager\fR\->\fIpoll()\fR" 4 .IX Item "$manager->poll()" Poll a job's status. The default implementation returns with the Globus::GRAM::Error::UNIMPLEMENTED error. Scheduler specific subclasses should reimplement this method to poll the scheduler. .Sp A scheduler which implements this method should return a hash reference containing the \s-1JOB_STATE\s0 value. The job's \s-1ID\s0 can be accessed by calling the \&\f(CW$self\fR\->{JobDescription}\->\fIjobid()\fR method. .ie n .IP "$manager\->\fIcancel()\fR" 4 .el .IP "\f(CW$manager\fR\->\fIcancel()\fR" 4 .IX Item "$manager->cancel()" Cancel a job. The default implementation returns with the Globus::GRAM::Error::UNIMPLEMENTED error. Scheduler specific subclasses should reimplement this method to remove the job from the scheduler. .Sp A scheduler which implements this method should return a hash reference containing the \s-1JOB_STATE\s0 value. The job's \s-1ID\s0 can be accessed by calling the \&\f(CW$self\fR\->{JobDescription}\->\fIjobid()\fR method. 
.ie n .IP "$manager\->\fIsignal()\fR" 4 .el .IP "\f(CW$manager\fR\->\fIsignal()\fR" 4 .IX Item "$manager->signal()" Signal a job. The default implementation returns with the Globus::GRAM::Error::UNIMPLEMENTED error. Scheduler specific subclasses should reimplement this method to remove the job from the scheduler. The JobManager module can determine the job's \s-1ID,\s0 the signal number, and the (optional) signal arguments from the Job Description by calling it's \fIjob_id()\fR, \fIsignal()\fR, and and \fIsignal_arg()\fR methods, respectively. .Sp Depending on the signal, it may be appropriate for the JobManager object to return a hash reference containing a \s-1JOB_STATE\s0 update. .ie n .IP "$manager\->\fImake_scratchdir()\fR" 4 .el .IP "\f(CW$manager\fR\->\fImake_scratchdir()\fR" 4 .IX Item "$manager->make_scratchdir()" Create a scratch directory for a job. The scratch directory location is based on the JobDescription's \fIscratch_dir_base()\fR and \fIscratch_dir()\fR methods. .Sp If the \fIscratch_dir()\fR value is a relative path, then a directory will be created as a subdirectory of \fIscratch_dir_base()\fR/\fIscratch_dir()\fR, otherwise, it will be created as a subdirectory of \fIscratch_dir()\fR. This method will return a hash reference containing mapping \s-1SCRATCH_DIR\s0 to the absolute path of newly created scratch directory if successful. .ie n .IP "$manager\->\fImake_scratchdir()\fR" 4 .el .IP "\f(CW$manager\fR\->\fImake_scratchdir()\fR" 4 .IX Item "$manager->make_scratchdir()" Delete a job's scratch directory. All files and subdirectories of the JobDescription's \fIscratch_directory()\fR will be deleted. .ie n .IP "$manager\->\fImake_scratchdir()\fR" 4 .el .IP "\f(CW$manager\fR\->\fImake_scratchdir()\fR" 4 .IX Item "$manager->make_scratchdir()" Delete some job-related files. All files listed in the JobDescription's \&\fIfile_cleanup()\fR array will be deleted. .ie n .IP "$manager\->\fIrewrite_urls()\fR" 4 .el .IP "\f(CW$manager\fR\->\fIrewrite_urls()\fR" 4 .IX Item "$manager->rewrite_urls()" Looks up URLs listed in the JobDescription's \fIstdin()\fR and \fIexecutable()\fR, and replaces them with paths to locally cached copies. .ie n .IP "$manager\->\fIstage_in()\fR" 4 .el .IP "\f(CW$manager\fR\->\fIstage_in()\fR" 4 .IX Item "$manager->stage_in()" Stage input files need for the job from remote storage. The files to be staged are defined by the array of [\s-1URL,\s0 path] pairs in the job description's \fIfile_stage_in()\fR and \fIfile_stage_in_shared()\fR methods. The Globus::GRAM::JobManager module provides an implementation of this functionality using the globus-url-copy and globus-gass-cache programs. Files which are staged in are not automatically removed when the job terminates. .Sp This function returns intermediate responses using the \&\fIGlobus::GRAM::JobManager::response()\fR method to let the job manager know when each individual file has been staged. .ie n .IP "$manager\->\fIstage_out()\fR" 4 .el .IP "\f(CW$manager\fR\->\fIstage_out()\fR" 4 .IX Item "$manager->stage_out()" Stage output files generated by this job to remote storage. The files to be staged are defined by the array of [\s-1URL,\s0 destination] pairs in the job description's \fIfile_stage_out()\fR method. The Globus::GRAM::JobManager module provides an implementation of this functionality using the globus-url-copy program. Files which are staged out are not removed by this method. 
.ie n .IP "$manager\->\fIcache_cleanup()\fR" 4 .el .IP "\f(CW$manager\fR\->\fIcache_cleanup()\fR" 4 .IX Item "$manager->cache_cleanup()" Clean up cache references in the \s-1GASS\s0 which match this job's cache tag . .ie n .IP "$manager\->\fIremote_io_file_create()\fR" 4 .el .IP "\f(CW$manager\fR\->\fIremote_io_file_create()\fR" 4 .IX Item "$manager->remote_io_file_create()" Create the remote I/O file in the job dir which will contain the remote_io_url \s-1RSL\s0 attribute's value. .ie n .IP "$manager\->\fIproxy_relocate()\fR" 4 .el .IP "\f(CW$manager\fR\->\fIproxy_relocate()\fR" 4 .IX Item "$manager->proxy_relocate()" Relocate the delegated proxy for job execution. Job Managers need to override the default if they intend to relocate the proxy into some common file system other than the cache. The job manager program does not depend on the new location of the proxy. Job Manager modules must not remove the default proxy. .ie n .IP "$hashref = $manager\->\fIproxy_update()\fR;" 4 .el .IP "\f(CW$hashref\fR = \f(CW$manager\fR\->\fIproxy_update()\fR;" 4 .IX Item "$hashref = $manager->proxy_update();" .PD 0 .ie n .IP "$manager\->append_path($ref, $var, $path)" 4 .el .IP "\f(CW$manager\fR\->append_path($ref, \f(CW$var\fR, \f(CW$path\fR)" 4 .IX Item "$manager->append_path($ref, $var, $path)" .PD Append \f(CW$path\fR to the value of \f(CW$ref\fR\->{$var}, dealing with the case where \&\f(CW$ref\fR\->{$var} is not yet defined. .ie n .IP "$manager\->pipe_out_cmd(@arg)" 4 .el .IP "\f(CW$manager\fR\->pipe_out_cmd(@arg)" 4 .IX Item "$manager->pipe_out_cmd(@arg)" Create a new process to run the first argument application with the remaining arguments (which may be empty). No shell metacharacter will be evaluated, avoiding a shell invocation. Stderr is redirected to /dev/null and stdout is being captured by the parent process, which is also the result returned. In list mode, all lines are returned, in scalar mode, only the first line is being returned. The line termination character is already cut off. Use this function as more efficient backticks, if you do not need shell metacharacter evaluation. .Sp Caution: This function deviates in two manners from regular backticks. Firstly, it chomps the line terminator from the output. Secondly, it returns only the first line in scalar context instead of a multiline concatinated string. As with regular backticks, the result may be undefined in scalar context, if no result exists. .Sp A child error code with an exit code of 127 indicates that the application could not be run. The scalar result returned by this function is usually undef'ed in this case. .ie n .IP "($stder, $rc) = $manager\->pipe_err_cmd(@arg)" 4 .el .IP "($stder, \f(CW$rc\fR) = \f(CW$manager\fR\->pipe_err_cmd(@arg)" 4 .IX Item "($stder, $rc) = $manager->pipe_err_cmd(@arg)" Create a new process to run the first argument application with the remaining arguments (which may be empty). No shell metacharacter will be evaluated, avoiding a shell invocation. .Sp This method returns a list of two items, the standard error of the program, and the exit code of the program. If the error code is 127, then the application could not be run. Standard output is discarded. .ie n .IP "$manager\->fork_and_exec_cmd(@arg)" 4 .el .IP "\f(CW$manager\fR\->fork_and_exec_cmd(@arg)" 4 .IX Item "$manager->fork_and_exec_cmd(@arg)" Fork off a child to run the first argument in the list. Remaining arguments will be passed, but shell interpolation is avoided. 
Signals \s-1SIGINT\s0 and \&\s-1SIGQUIT\s0 are ignored in the child process. Stdout is appended to /dev/null, and stderr is redirected to the same place as stdout. The parent waits for the child to finish, and returns the value of the \s-1CHILD_ERROR\s0 variable as the result. Use this function as a more efficient \fIsystem()\fR call if you do not need shell metacharacter evaluation. .Sp Note that the inability to execute the program will result in a status code of 127. .ie n .IP "$manager\->\fIjob_dir()\fR" 4 .el .IP "\f(CW$manager\fR\->\fIjob_dir()\fR" 4 .IX Item "$manager->job_dir()" Return the temporary directory used to store job-related files which do not need file caching. .ie n .IP "$manager\->\fIsetup_softenv()\fR" 4 .el .IP "\f(CW$manager\fR\->\fIsetup_softenv()\fR" 4 .IX Item "$manager->setup_softenv()" Either add a line to the specified command script file handle to load the user's default SoftEnv configuration, or create a custom SoftEnv script and add commands to the specified command script file handle to load it. .SH "RESPONSES" .IX Header "RESPONSES" When returning from a job interface method, or when sending an intermediate response via the \fIrespond\fR() method, the following hash keys are valid: .IP "\(bu" 4 \&\s-1JOB_STATE\s0 .Sp An integer job state value. These are enumerated in the Globus::GRAM::JobState module. .IP "\(bu" 4 \&\s-1ERROR\s0 .Sp An integer error code. These are enumerated in the Globus::GRAM::Error module. .IP "\(bu" 4 \&\s-1JOB_ID\s0 .Sp A string containing a job identifier, which can be used to poll, cancel, or signal a job in progress. This response should only be returned by the \&\fIsubmit\fR method. .IP "\(bu" 4 \&\s-1SCRATCH_DIR\s0 .Sp A string containing the path to a newly-created scratch directory. This response should only be returned by the \fImake_scratchdir\fR method. .IP "\(bu" 4 \&\s-1STAGED_IN\s0 .Sp A string containing the (\s-1URL,\s0 path) pair for a file which has now been staged in. This response should only be returned by the \fIstage_in\fR method. .IP "\(bu" 4 \&\s-1STAGED_IN_SHARED\s0 .Sp A string containing the (\s-1URL,\s0 path) pair for a file which has now been staged in and symlinked from the cache. This response should only be returned by the \&\fIstage_in_shared\fR method. .IP "\(bu" 4 \&\s-1STAGED_OUT\s0 .Sp A string containing the (path, \s-1URL\s0) pair for a file which has now been staged out by the script. This response should only be returned by the \&\fIstage_out\fR method. 
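.SH "EXAMPLE"
.IX Header "EXAMPLE"
The following fragment is an illustrative sketch, not part of the distributed
package. It shows how a scheduler-specific subclass might implement the
\&\fIsubmit()\fR and \fIpoll()\fR methods described above and return the
\&\s-1JOB_STATE\s0 and \s-1JOB_ID\s0 response keys. The \f(CWexample_submit\fR and
\&\f(CWexample_status\fR commands are hypothetical placeholders, and
Globus::GRAM::Error::JOB_EXECUTION_FAILED is assumed to be one of the error
constants enumerated in the Globus::GRAM::Error module; everything else uses
only the interfaces documented above.
.Sp
.Vb 10
\& package Globus::GRAM::JobManager::example;
\&
\& use strict;
\& use Globus::GRAM::Error;
\& use Globus::GRAM::JobState;
\& use Globus::GRAM::JobManager;
\&
\& our @ISA = qw(Globus::GRAM::JobManager);
\&
\& sub submit
\& {
\&     my $self = shift;
\&     my $description = $self\->{JobDescription};
\&
\&     # Hypothetical scheduler command, assumed to print a job id on stdout.
\&     my $job_id = $self\->pipe_out_cmd('example_submit',
\&                                      $description\->executable());
\&
\&     if (! defined($job_id) || $job_id eq '')
\&     {
\&         # Assumed error constant from the Globus::GRAM::Error enumeration.
\&         return Globus::GRAM::Error::JOB_EXECUTION_FAILED();
\&     }
\&     $self\->log("submitted job $job_id");
\&
\&     return { JOB_STATE => Globus::GRAM::JobState::PENDING(),
\&              JOB_ID    => $job_id };
\& }
\&
\& sub poll
\& {
\&     my $self = shift;
\&     my $job_id = $self\->{JobDescription}\->jobid();
\&
\&     # Hypothetical status command, assumed to print PENDING, ACTIVE, or DONE.
\&     my $state = $self\->pipe_out_cmd('example_status', $job_id) || '';
\&
\&     if ($state eq 'DONE')
\&     {
\&         return { JOB_STATE => Globus::GRAM::JobState::DONE() };
\&     }
\&     elsif ($state eq 'ACTIVE')
\&     {
\&         return { JOB_STATE => Globus::GRAM::JobState::ACTIVE() };
\&     }
\&     return { JOB_STATE => Globus::GRAM::JobState::PENDING() };
\& }
\&
\& 1;
.Ve
.Sp
A complete job manager script would also override \fIcancel()\fR, and possibly
the staging and scratch directory methods, following the same pattern of
returning either a response hash or a Globus::GRAM::Error value.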
globus_gram_job_manager_scripts-7.3/PaxHeaders.20921/StdioMerger.pm0000644000000000000000000000013213765226605023547 xustar000000000000000030 mtime=1607806341.384897731 30 atime=1607806341.552897731 30 ctime=1607807072.920897731 globus_gram_job_manager_scripts-7.3/StdioMerger.pm0000664000372000037200000000777213765226605023474 0ustar00travistravis00000000000000package Globus::GRAM::StdioMerger; use strict; use Globus::Core::Paths; use File::Copy; sub SEEK_SET {0;} # ugly non-portable hack to avoid "use Fcntl" sub new { my $proto = shift; my $class = ref($proto) || $proto; my $dir = shift; my $stdout = shift; my $stderr = shift; my $self = {}; $self->{STDOUT_FILES} = []; $self->{STDERR_FILES} = []; $self->{STDOUT} = $stdout; $self->{STDERR} = $stderr; $self->{DIR} = $dir; $self->{MERGE_FILENAME} = "$dir/stdio_merge_metadata"; bless $self, $class; if( -s $self->{MERGE_FILENAME}) { $self->load_state(); } return $self; } sub add_file { my $self = shift; my $type = shift; my $array; my $index; my $format = "\%s/std\%s\%03d"; my $new_name; if($type eq 'out') { $array = $self->{STDOUT_FILES}; } else { $array = $self->{STDERR_FILES}; } $index = scalar(@{$array}); $new_name = sprintf($format, $self->{DIR}, $type, $index); if($new_name eq '') { return undef; } push(@{$array}, [ "$type", $new_name, 0 ]); $self->store_state(); return $new_name; } sub poll { my $self = shift; my $final = shift; local(*FH); open(FH, '>>' . $self->{STDERR}); $self->poll_list('STDOUT', $final); $self->poll_list('STDERR', $final); $self->store_state(); close(FH); } sub poll_list { my $self = shift; my $which = shift; my $final = shift; local(*OUT); open(OUT, '>>' . $self->{$which}); select((select(OUT),$|=1)[$[]); # autoflush=1 foreach my $record (@{$self->{$which . '_FILES'}}) { my @stat = CORE::stat($record->[1]); next if @stat == 0; local(*FH); open(FH, '<'. $record->[1]); # We want to merge up to the last newline ... but if # we're in the DONE state, then we want to poll until # EOF do { if($stat[7] > $record->[2]) { my($buffer, $buffersize, $writable); # file has grown... merge in new data $buffersize = $stat[7] - $record->[2]; $buffersize = 4096 if $buffersize > 4096; seek(FH, $record->[2], SEEK_SET); read(FH, $buffer, $buffersize); $writable = $buffer; # We want to do line buffering, so we'll just # strip off all data after the last newline if(! $final) { my @writable; @writable = split(//, $writable); while(@writable) { $_ = pop(@writable); if($_ eq "\n") { push(@writable, "\n"); last; } } $writable = join('', @writable); } $record->[2] += length($writable); print OUT $writable; } } while($final && ($record->[2] < $stat[7])); close(FH); } close(OUT); } sub store_state { my $self = shift; my $tmp_filename = $self->{MERGE_FILENAME} . '.tmp'; my $format = '%s "%s" %s' . "\n"; local(*TMP); open(TMP, '>' . $tmp_filename); foreach(@{$self->{STDOUT_FILES}}, @{$self->{STDERR_FILES}}) { printf TMP $format, $_->[0], $_->[1], $_->[2]; } close(TMP); rename($tmp_filename, $self->{MERGE_FILENAME}); return 0; } sub load_state { my $self = shift; local(*IN); open(IN, '<' . 
$self->{MERGE_FILENAME}); while() { m/^(out|err)\s+"([^"]+)"\s+([0-9]+)$/ or next; my ($type, $local_filename, $offset) = ($1, $2, $3); if($type eq 'out') { push(@{$self->{STDOUT_FILES}}, [$type, $local_filename, $offset]); } elsif($type eq 'err') { push(@{$self->{STDERR_FILES}}, [$type, $local_filename, $offset]); } } close IN; return 0; } sub pipe_out_cmd { my @result; local(*READ); my $pid = open( READ, "-|" ); return undef unless defined $pid; if ( $pid ) { # parent chomp(@result = ); close(READ); } else { # child open( STDERR, '>>/dev/null' ); select(STDERR); $|=1; select(STDOUT); $|=1; if (! exec { $_[0] } @_ ) { exit(127); } } wantarray ? @result : $result[0]; } 1; globus_gram_job_manager_scripts-7.3/PaxHeaders.20921/configure0000644000000000000000000000013213765227400022663 xustar000000000000000030 mtime=1607806720.948897731 30 atime=1607806956.532897731 30 ctime=1607807072.912897731 globus_gram_job_manager_scripts-7.3/configure0000755000372000037200000032414013765227400022600 0ustar00travistravis00000000000000#! /bin/sh # Guess values for system-dependent variables and create Makefiles. # Generated by GNU Autoconf 2.69 for globus_gram_job_manager_scripts 7.3. # # Report bugs to . # # # Copyright (C) 1992-1996, 1998-2012 Free Software Foundation, Inc. # # # This configure script is free software; the Free Software Foundation # gives unlimited permission to copy, distribute and modify it. ## -------------------- ## ## M4sh Initialization. ## ## -------------------- ## # Be more Bourne compatible DUALCASE=1; export DUALCASE # for MKS sh if test -n "${ZSH_VERSION+set}" && (emulate sh) >/dev/null 2>&1; then : emulate sh NULLCMD=: # Pre-4.2 versions of Zsh do word splitting on ${1+"$@"}, which # is contrary to our usage. Disable this feature. alias -g '${1+"$@"}'='"$@"' setopt NO_GLOB_SUBST else case `(set -o) 2>/dev/null` in #( *posix*) : set -o posix ;; #( *) : ;; esac fi as_nl=' ' export as_nl # Printing a long string crashes Solaris 7 /usr/bin/printf. as_echo='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\' as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo$as_echo # Prefer a ksh shell builtin over an external printf program on Solaris, # but without wasting forks for bash or zsh. if test -z "$BASH_VERSION$ZSH_VERSION" \ && (test "X`print -r -- $as_echo`" = "X$as_echo") 2>/dev/null; then as_echo='print -r --' as_echo_n='print -rn --' elif (test "X`printf %s $as_echo`" = "X$as_echo") 2>/dev/null; then as_echo='printf %s\n' as_echo_n='printf %s' else if test "X`(/usr/ucb/echo -n -n $as_echo) 2>/dev/null`" = "X-n $as_echo"; then as_echo_body='eval /usr/ucb/echo -n "$1$as_nl"' as_echo_n='/usr/ucb/echo -n' else as_echo_body='eval expr "X$1" : "X\\(.*\\)"' as_echo_n_body='eval arg=$1; case $arg in #( *"$as_nl"*) expr "X$arg" : "X\\(.*\\)$as_nl"; arg=`expr "X$arg" : ".*$as_nl\\(.*\\)"`;; esac; expr "X$arg" : "X\\(.*\\)" | tr -d "$as_nl" ' export as_echo_n_body as_echo_n='sh -c $as_echo_n_body as_echo' fi export as_echo_body as_echo='sh -c $as_echo_body as_echo' fi # The user is always right. if test "${PATH_SEPARATOR+set}" != set; then PATH_SEPARATOR=: (PATH='/bin;/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 && { (PATH='/bin:/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 || PATH_SEPARATOR=';' } fi # IFS # We need space, tab and new line, in precisely that order. Quoting is # there to prevent editors from complaining about space-tab. 
# (If _AS_PATH_WALK were called with IFS unset, it would disable word # splitting by setting IFS to empty value.) IFS=" "" $as_nl" # Find who we are. Look in the path if we contain no directory separator. as_myself= case $0 in #(( *[\\/]* ) as_myself=$0 ;; *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. test -r "$as_dir/$0" && as_myself=$as_dir/$0 && break done IFS=$as_save_IFS ;; esac # We did not find ourselves, most probably we were run as `sh COMMAND' # in which case we are not to be found in the path. if test "x$as_myself" = x; then as_myself=$0 fi if test ! -f "$as_myself"; then $as_echo "$as_myself: error: cannot find myself; rerun with an absolute file name" >&2 exit 1 fi # Unset variables that we do not need and which cause bugs (e.g. in # pre-3.0 UWIN ksh). But do not cause bugs in bash 2.01; the "|| exit 1" # suppresses any "Segmentation fault" message there. '((' could # trigger a bug in pdksh 5.2.14. for as_var in BASH_ENV ENV MAIL MAILPATH do eval test x\${$as_var+set} = xset \ && ( (unset $as_var) || exit 1) >/dev/null 2>&1 && unset $as_var || : done PS1='$ ' PS2='> ' PS4='+ ' # NLS nuisances. LC_ALL=C export LC_ALL LANGUAGE=C export LANGUAGE # CDPATH. (unset CDPATH) >/dev/null 2>&1 && unset CDPATH # Use a proper internal environment variable to ensure we don't fall # into an infinite loop, continuously re-executing ourselves. if test x"${_as_can_reexec}" != xno && test "x$CONFIG_SHELL" != x; then _as_can_reexec=no; export _as_can_reexec; # We cannot yet assume a decent shell, so we have to provide a # neutralization value for shells without unset; and this also # works around shells that cannot unset nonexistent variables. # Preserve -v and -x to the replacement shell. BASH_ENV=/dev/null ENV=/dev/null (unset BASH_ENV) >/dev/null 2>&1 && unset BASH_ENV ENV case $- in # (((( *v*x* | *x*v* ) as_opts=-vx ;; *v* ) as_opts=-v ;; *x* ) as_opts=-x ;; * ) as_opts= ;; esac exec $CONFIG_SHELL $as_opts "$as_myself" ${1+"$@"} # Admittedly, this is quite paranoid, since all the known shells bail # out after a failed `exec'. $as_echo "$0: could not re-execute with $CONFIG_SHELL" >&2 as_fn_exit 255 fi # We don't want this to propagate to other subprocesses. { _as_can_reexec=; unset _as_can_reexec;} if test "x$CONFIG_SHELL" = x; then as_bourne_compatible="if test -n \"\${ZSH_VERSION+set}\" && (emulate sh) >/dev/null 2>&1; then : emulate sh NULLCMD=: # Pre-4.2 versions of Zsh do word splitting on \${1+\"\$@\"}, which # is contrary to our usage. Disable this feature. alias -g '\${1+\"\$@\"}'='\"\$@\"' setopt NO_GLOB_SUBST else case \`(set -o) 2>/dev/null\` in #( *posix*) : set -o posix ;; #( *) : ;; esac fi " as_required="as_fn_return () { (exit \$1); } as_fn_success () { as_fn_return 0; } as_fn_failure () { as_fn_return 1; } as_fn_ret_success () { return 0; } as_fn_ret_failure () { return 1; } exitcode=0 as_fn_success || { exitcode=1; echo as_fn_success failed.; } as_fn_failure && { exitcode=1; echo as_fn_failure succeeded.; } as_fn_ret_success || { exitcode=1; echo as_fn_ret_success failed.; } as_fn_ret_failure && { exitcode=1; echo as_fn_ret_failure succeeded.; } if ( set x; as_fn_ret_success y && test x = \"\$1\" ); then : else exitcode=1; echo positional parameters were not saved. 
fi test x\$exitcode = x0 || exit 1 test -x / || exit 1" as_suggested=" as_lineno_1=";as_suggested=$as_suggested$LINENO;as_suggested=$as_suggested" as_lineno_1a=\$LINENO as_lineno_2=";as_suggested=$as_suggested$LINENO;as_suggested=$as_suggested" as_lineno_2a=\$LINENO eval 'test \"x\$as_lineno_1'\$as_run'\" != \"x\$as_lineno_2'\$as_run'\" && test \"x\`expr \$as_lineno_1'\$as_run' + 1\`\" = \"x\$as_lineno_2'\$as_run'\"' || exit 1" if (eval "$as_required") 2>/dev/null; then : as_have_required=yes else as_have_required=no fi if test x$as_have_required = xyes && (eval "$as_suggested") 2>/dev/null; then : else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR as_found=false for as_dir in /bin$PATH_SEPARATOR/usr/bin$PATH_SEPARATOR$PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. as_found=: case $as_dir in #( /*) for as_base in sh bash ksh sh5; do # Try only shells that exist, to save several forks. as_shell=$as_dir/$as_base if { test -f "$as_shell" || test -f "$as_shell.exe"; } && { $as_echo "$as_bourne_compatible""$as_required" | as_run=a "$as_shell"; } 2>/dev/null; then : CONFIG_SHELL=$as_shell as_have_required=yes if { $as_echo "$as_bourne_compatible""$as_suggested" | as_run=a "$as_shell"; } 2>/dev/null; then : break 2 fi fi done;; esac as_found=false done $as_found || { if { test -f "$SHELL" || test -f "$SHELL.exe"; } && { $as_echo "$as_bourne_compatible""$as_required" | as_run=a "$SHELL"; } 2>/dev/null; then : CONFIG_SHELL=$SHELL as_have_required=yes fi; } IFS=$as_save_IFS if test "x$CONFIG_SHELL" != x; then : export CONFIG_SHELL # We cannot yet assume a decent shell, so we have to provide a # neutralization value for shells without unset; and this also # works around shells that cannot unset nonexistent variables. # Preserve -v and -x to the replacement shell. BASH_ENV=/dev/null ENV=/dev/null (unset BASH_ENV) >/dev/null 2>&1 && unset BASH_ENV ENV case $- in # (((( *v*x* | *x*v* ) as_opts=-vx ;; *v* ) as_opts=-v ;; *x* ) as_opts=-x ;; * ) as_opts= ;; esac exec $CONFIG_SHELL $as_opts "$as_myself" ${1+"$@"} # Admittedly, this is quite paranoid, since all the known shells bail # out after a failed `exec'. $as_echo "$0: could not re-execute with $CONFIG_SHELL" >&2 exit 255 fi if test x$as_have_required = xno; then : $as_echo "$0: This script requires a shell more modern than all" $as_echo "$0: the shells that I found on your system." if test x${ZSH_VERSION+set} = xset ; then $as_echo "$0: In particular, zsh $ZSH_VERSION has bugs and should" $as_echo "$0: be upgraded to zsh 4.3.4 or later." else $as_echo "$0: Please tell bug-autoconf@gnu.org and $0: https://github.com/gridcf/gct/issues about your system, $0: including any error possibly output before this $0: message. Then install a modern shell, or manually run $0: the script under such a shell if you do have one." fi exit 1 fi fi fi SHELL=${CONFIG_SHELL-/bin/sh} export SHELL # Unset more variables known to interfere with behavior of common tools. CLICOLOR_FORCE= GREP_OPTIONS= unset CLICOLOR_FORCE GREP_OPTIONS ## --------------------- ## ## M4sh Shell Functions. ## ## --------------------- ## # as_fn_unset VAR # --------------- # Portably unset VAR. as_fn_unset () { { eval $1=; unset $1;} } as_unset=as_fn_unset # as_fn_set_status STATUS # ----------------------- # Set $? to STATUS, without forking. as_fn_set_status () { return $1 } # as_fn_set_status # as_fn_exit STATUS # ----------------- # Exit the shell with STATUS, even in a "trap 0" or "set -e" context. 
as_fn_exit () { set +e as_fn_set_status $1 exit $1 } # as_fn_exit # as_fn_mkdir_p # ------------- # Create "$as_dir" as a directory, including parents if necessary. as_fn_mkdir_p () { case $as_dir in #( -*) as_dir=./$as_dir;; esac test -d "$as_dir" || eval $as_mkdir_p || { as_dirs= while :; do case $as_dir in #( *\'*) as_qdir=`$as_echo "$as_dir" | sed "s/'/'\\\\\\\\''/g"`;; #'( *) as_qdir=$as_dir;; esac as_dirs="'$as_qdir' $as_dirs" as_dir=`$as_dirname -- "$as_dir" || $as_expr X"$as_dir" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ X"$as_dir" : 'X\(//\)[^/]' \| \ X"$as_dir" : 'X\(//\)$' \| \ X"$as_dir" : 'X\(/\)' \| . 2>/dev/null || $as_echo X"$as_dir" | sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ s//\1/ q } /^X\(\/\/\)[^/].*/{ s//\1/ q } /^X\(\/\/\)$/{ s//\1/ q } /^X\(\/\).*/{ s//\1/ q } s/.*/./; q'` test -d "$as_dir" && break done test -z "$as_dirs" || eval "mkdir $as_dirs" } || test -d "$as_dir" || as_fn_error $? "cannot create directory $as_dir" } # as_fn_mkdir_p # as_fn_executable_p FILE # ----------------------- # Test if FILE is an executable regular file. as_fn_executable_p () { test -f "$1" && test -x "$1" } # as_fn_executable_p # as_fn_append VAR VALUE # ---------------------- # Append the text in VALUE to the end of the definition contained in VAR. Take # advantage of any shell optimizations that allow amortized linear growth over # repeated appends, instead of the typical quadratic growth present in naive # implementations. if (eval "as_var=1; as_var+=2; test x\$as_var = x12") 2>/dev/null; then : eval 'as_fn_append () { eval $1+=\$2 }' else as_fn_append () { eval $1=\$$1\$2 } fi # as_fn_append # as_fn_arith ARG... # ------------------ # Perform arithmetic evaluation on the ARGs, and store the result in the # global $as_val. Take advantage of shells that can avoid forks. The arguments # must be portable across $(()) and expr. if (eval "test \$(( 1 + 1 )) = 2") 2>/dev/null; then : eval 'as_fn_arith () { as_val=$(( $* )) }' else as_fn_arith () { as_val=`expr "$@" || test $? -eq 1` } fi # as_fn_arith # as_fn_error STATUS ERROR [LINENO LOG_FD] # ---------------------------------------- # Output "`basename $0`: error: ERROR" to stderr. If LINENO and LOG_FD are # provided, also output the error to LOG_FD, referencing LINENO. Then exit the # script with STATUS, using 1 if that was 0. as_fn_error () { as_status=$1; test $as_status -eq 0 && as_status=1 if test "$4"; then as_lineno=${as_lineno-"$3"} as_lineno_stack=as_lineno_stack=$as_lineno_stack $as_echo "$as_me:${as_lineno-$LINENO}: error: $2" >&$4 fi $as_echo "$as_me: error: $2" >&2 as_fn_exit $as_status } # as_fn_error if expr a : '\(a\)' >/dev/null 2>&1 && test "X`expr 00001 : '.*\(...\)'`" = X001; then as_expr=expr else as_expr=false fi if (basename -- /) >/dev/null 2>&1 && test "X`basename -- / 2>&1`" = "X/"; then as_basename=basename else as_basename=false fi if (as_dir=`dirname -- /` && test "X$as_dir" = X/) >/dev/null 2>&1; then as_dirname=dirname else as_dirname=false fi as_me=`$as_basename -- "$0" || $as_expr X/"$0" : '.*/\([^/][^/]*\)/*$' \| \ X"$0" : 'X\(//\)$' \| \ X"$0" : 'X\(/\)' \| . 2>/dev/null || $as_echo X/"$0" | sed '/^.*\/\([^/][^/]*\)\/*$/{ s//\1/ q } /^X\/\(\/\/\)$/{ s//\1/ q } /^X\/\(\/\).*/{ s//\1/ q } s/.*/./; q'` # Avoid depending upon Character Ranges. 
as_cr_letters='abcdefghijklmnopqrstuvwxyz' as_cr_LETTERS='ABCDEFGHIJKLMNOPQRSTUVWXYZ' as_cr_Letters=$as_cr_letters$as_cr_LETTERS as_cr_digits='0123456789' as_cr_alnum=$as_cr_Letters$as_cr_digits as_lineno_1=$LINENO as_lineno_1a=$LINENO as_lineno_2=$LINENO as_lineno_2a=$LINENO eval 'test "x$as_lineno_1'$as_run'" != "x$as_lineno_2'$as_run'" && test "x`expr $as_lineno_1'$as_run' + 1`" = "x$as_lineno_2'$as_run'"' || { # Blame Lee E. McMahon (1931-1989) for sed's syntax. :-) sed -n ' p /[$]LINENO/= ' <$as_myself | sed ' s/[$]LINENO.*/&-/ t lineno b :lineno N :loop s/[$]LINENO\([^'$as_cr_alnum'_].*\n\)\(.*\)/\2\1\2/ t loop s/-\n.*// ' >$as_me.lineno && chmod +x "$as_me.lineno" || { $as_echo "$as_me: error: cannot create $as_me.lineno; rerun with a POSIX shell" >&2; as_fn_exit 1; } # If we had to re-execute with $CONFIG_SHELL, we're ensured to have # already done that, so ensure we don't try to do so again and fall # in an infinite loop. This has already happened in practice. _as_can_reexec=no; export _as_can_reexec # Don't try to exec as it changes $[0], causing all sort of problems # (the dirname of $[0] is not the place where we might find the # original and so on. Autoconf is especially sensitive to this). . "./$as_me.lineno" # Exit status is that of the last command. exit } ECHO_C= ECHO_N= ECHO_T= case `echo -n x` in #((((( -n*) case `echo 'xy\c'` in *c*) ECHO_T=' ';; # ECHO_T is single tab character. xy) ECHO_C='\c';; *) echo `echo ksh88 bug on AIX 6.1` > /dev/null ECHO_T=' ';; esac;; *) ECHO_N='-n';; esac rm -f conf$$ conf$$.exe conf$$.file if test -d conf$$.dir; then rm -f conf$$.dir/conf$$.file else rm -f conf$$.dir mkdir conf$$.dir 2>/dev/null fi if (echo >conf$$.file) 2>/dev/null; then if ln -s conf$$.file conf$$ 2>/dev/null; then as_ln_s='ln -s' # ... but there are two gotchas: # 1) On MSYS, both `ln -s file dir' and `ln file dir' fail. # 2) DJGPP < 2.04 has no symlinks; `ln -s' creates a wrapper executable. # In both cases, we have to default to `cp -pR'. ln -s conf$$.file conf$$.dir 2>/dev/null && test ! -f conf$$.exe || as_ln_s='cp -pR' elif ln conf$$.file conf$$ 2>/dev/null; then as_ln_s=ln else as_ln_s='cp -pR' fi else as_ln_s='cp -pR' fi rm -f conf$$ conf$$.exe conf$$.dir/conf$$.file conf$$.file rmdir conf$$.dir 2>/dev/null if mkdir -p . 2>/dev/null; then as_mkdir_p='mkdir -p "$as_dir"' else test -d ./-p && rmdir ./-p as_mkdir_p=false fi as_test_x='test -x' as_executable_p=as_fn_executable_p # Sed expression to map a string onto a valid CPP name. as_tr_cpp="eval sed 'y%*$as_cr_letters%P$as_cr_LETTERS%;s%[^_$as_cr_alnum]%_%g'" # Sed expression to map a string onto a valid variable name. as_tr_sh="eval sed 'y%*+%pp%;s%[^_$as_cr_alnum]%_%g'" test -n "$DJDIR" || exec 7<&0 &1 # Name of the host. # hostname on some systems (SVR3.2, old GNU/Linux) returns a bogus exit status, # so uname gets run too. ac_hostname=`(hostname || uname -n) 2>/dev/null | sed 1q` # # Initializations. # ac_default_prefix=/usr/local ac_clean_files= ac_config_libobj_dir=. LIBOBJS= cross_compiling=no subdirs= MFLAGS= MAKEFLAGS= # Identity of this package. 
PACKAGE_NAME='globus_gram_job_manager_scripts' PACKAGE_TARNAME='globus_gram_job_manager_scripts' PACKAGE_VERSION='7.3' PACKAGE_STRING='globus_gram_job_manager_scripts 7.3' PACKAGE_BUGREPORT='https://github.com/gridcf/gct/issues' PACKAGE_URL='' ac_subst_vars='LTLIBOBJS LIBOBJS BUILD_MANPAGES_FALSE BUILD_MANPAGES_TRUE A2X perlmoduledir DIRT_BRANCH_ID DIRT_TIMESTAMP AM_BACKSLASH AM_DEFAULT_VERBOSITY AM_DEFAULT_V AM_V am__untar am__tar AMTAR am__leading_dot SET_MAKE AWK mkdir_p MKDIR_P INSTALL_STRIP_PROGRAM STRIP install_sh MAKEINFO AUTOHEADER AUTOMAKE AUTOCONF ACLOCAL VERSION PACKAGE CYGPATH_W am__isrc INSTALL_DATA INSTALL_SCRIPT INSTALL_PROGRAM PACKAGE_DEPS AGE_VERSION MINOR_VERSION MAJOR_VERSION target_alias host_alias build_alias LIBS ECHO_T ECHO_N ECHO_C DEFS mandir localedir libdir psdir pdfdir dvidir htmldir infodir docdir oldincludedir includedir localstatedir sharedstatedir sysconfdir datadir datarootdir libexecdir sbindir bindir program_transform_name prefix exec_prefix PACKAGE_URL PACKAGE_BUGREPORT PACKAGE_STRING PACKAGE_VERSION PACKAGE_TARNAME PACKAGE_NAME PATH_SEPARATOR SHELL' ac_subst_files='' ac_user_opts=' enable_option_checking enable_silent_rules with_perlmoduledir ' ac_precious_vars='build_alias host_alias target_alias' # Initialize some variables set by options. ac_init_help= ac_init_version=false ac_unrecognized_opts= ac_unrecognized_sep= # The variables have the same names as the options, with # dashes changed to underlines. cache_file=/dev/null exec_prefix=NONE no_create= no_recursion= prefix=NONE program_prefix=NONE program_suffix=NONE program_transform_name=s,x,x, silent= site= srcdir= verbose= x_includes=NONE x_libraries=NONE # Installation directory options. # These are left unexpanded so users can "make install exec_prefix=/foo" # and all the variables that are supposed to be based on exec_prefix # by default will actually change. # Use braces instead of parens because sh, perl, etc. also accept them. # (The list follows the same order as the GNU Coding Standards.) bindir='${exec_prefix}/bin' sbindir='${exec_prefix}/sbin' libexecdir='${exec_prefix}/libexec' datarootdir='${prefix}/share' datadir='${datarootdir}' sysconfdir='${prefix}/etc' sharedstatedir='${prefix}/com' localstatedir='${prefix}/var' includedir='${prefix}/include' oldincludedir='/usr/include' docdir='${datarootdir}/doc/${PACKAGE_TARNAME}' infodir='${datarootdir}/info' htmldir='${docdir}' dvidir='${docdir}' pdfdir='${docdir}' psdir='${docdir}' libdir='${exec_prefix}/lib' localedir='${datarootdir}/locale' mandir='${datarootdir}/man' ac_prev= ac_dashdash= for ac_option do # If the previous option needs an argument, assign it. if test -n "$ac_prev"; then eval $ac_prev=\$ac_option ac_prev= continue fi case $ac_option in *=?*) ac_optarg=`expr "X$ac_option" : '[^=]*=\(.*\)'` ;; *=) ac_optarg= ;; *) ac_optarg=yes ;; esac # Accept the important Cygnus configure options, so we can diagnose typos. 
case $ac_dashdash$ac_option in --) ac_dashdash=yes ;; -bindir | --bindir | --bindi | --bind | --bin | --bi) ac_prev=bindir ;; -bindir=* | --bindir=* | --bindi=* | --bind=* | --bin=* | --bi=*) bindir=$ac_optarg ;; -build | --build | --buil | --bui | --bu) ac_prev=build_alias ;; -build=* | --build=* | --buil=* | --bui=* | --bu=*) build_alias=$ac_optarg ;; -cache-file | --cache-file | --cache-fil | --cache-fi \ | --cache-f | --cache- | --cache | --cach | --cac | --ca | --c) ac_prev=cache_file ;; -cache-file=* | --cache-file=* | --cache-fil=* | --cache-fi=* \ | --cache-f=* | --cache-=* | --cache=* | --cach=* | --cac=* | --ca=* | --c=*) cache_file=$ac_optarg ;; --config-cache | -C) cache_file=config.cache ;; -datadir | --datadir | --datadi | --datad) ac_prev=datadir ;; -datadir=* | --datadir=* | --datadi=* | --datad=*) datadir=$ac_optarg ;; -datarootdir | --datarootdir | --datarootdi | --datarootd | --dataroot \ | --dataroo | --dataro | --datar) ac_prev=datarootdir ;; -datarootdir=* | --datarootdir=* | --datarootdi=* | --datarootd=* \ | --dataroot=* | --dataroo=* | --dataro=* | --datar=*) datarootdir=$ac_optarg ;; -disable-* | --disable-*) ac_useropt=`expr "x$ac_option" : 'x-*disable-\(.*\)'` # Reject names that are not valid shell variable names. expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null && as_fn_error $? "invalid feature name: $ac_useropt" ac_useropt_orig=$ac_useropt ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'` case $ac_user_opts in *" "enable_$ac_useropt" "*) ;; *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--disable-$ac_useropt_orig" ac_unrecognized_sep=', ';; esac eval enable_$ac_useropt=no ;; -docdir | --docdir | --docdi | --doc | --do) ac_prev=docdir ;; -docdir=* | --docdir=* | --docdi=* | --doc=* | --do=*) docdir=$ac_optarg ;; -dvidir | --dvidir | --dvidi | --dvid | --dvi | --dv) ac_prev=dvidir ;; -dvidir=* | --dvidir=* | --dvidi=* | --dvid=* | --dvi=* | --dv=*) dvidir=$ac_optarg ;; -enable-* | --enable-*) ac_useropt=`expr "x$ac_option" : 'x-*enable-\([^=]*\)'` # Reject names that are not valid shell variable names. expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null && as_fn_error $? "invalid feature name: $ac_useropt" ac_useropt_orig=$ac_useropt ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'` case $ac_user_opts in *" "enable_$ac_useropt" "*) ;; *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--enable-$ac_useropt_orig" ac_unrecognized_sep=', ';; esac eval enable_$ac_useropt=\$ac_optarg ;; -exec-prefix | --exec_prefix | --exec-prefix | --exec-prefi \ | --exec-pref | --exec-pre | --exec-pr | --exec-p | --exec- \ | --exec | --exe | --ex) ac_prev=exec_prefix ;; -exec-prefix=* | --exec_prefix=* | --exec-prefix=* | --exec-prefi=* \ | --exec-pref=* | --exec-pre=* | --exec-pr=* | --exec-p=* | --exec-=* \ | --exec=* | --exe=* | --ex=*) exec_prefix=$ac_optarg ;; -gas | --gas | --ga | --g) # Obsolete; use --with-gas. 
with_gas=yes ;; -help | --help | --hel | --he | -h) ac_init_help=long ;; -help=r* | --help=r* | --hel=r* | --he=r* | -hr*) ac_init_help=recursive ;; -help=s* | --help=s* | --hel=s* | --he=s* | -hs*) ac_init_help=short ;; -host | --host | --hos | --ho) ac_prev=host_alias ;; -host=* | --host=* | --hos=* | --ho=*) host_alias=$ac_optarg ;; -htmldir | --htmldir | --htmldi | --htmld | --html | --htm | --ht) ac_prev=htmldir ;; -htmldir=* | --htmldir=* | --htmldi=* | --htmld=* | --html=* | --htm=* \ | --ht=*) htmldir=$ac_optarg ;; -includedir | --includedir | --includedi | --included | --include \ | --includ | --inclu | --incl | --inc) ac_prev=includedir ;; -includedir=* | --includedir=* | --includedi=* | --included=* | --include=* \ | --includ=* | --inclu=* | --incl=* | --inc=*) includedir=$ac_optarg ;; -infodir | --infodir | --infodi | --infod | --info | --inf) ac_prev=infodir ;; -infodir=* | --infodir=* | --infodi=* | --infod=* | --info=* | --inf=*) infodir=$ac_optarg ;; -libdir | --libdir | --libdi | --libd) ac_prev=libdir ;; -libdir=* | --libdir=* | --libdi=* | --libd=*) libdir=$ac_optarg ;; -libexecdir | --libexecdir | --libexecdi | --libexecd | --libexec \ | --libexe | --libex | --libe) ac_prev=libexecdir ;; -libexecdir=* | --libexecdir=* | --libexecdi=* | --libexecd=* | --libexec=* \ | --libexe=* | --libex=* | --libe=*) libexecdir=$ac_optarg ;; -localedir | --localedir | --localedi | --localed | --locale) ac_prev=localedir ;; -localedir=* | --localedir=* | --localedi=* | --localed=* | --locale=*) localedir=$ac_optarg ;; -localstatedir | --localstatedir | --localstatedi | --localstated \ | --localstate | --localstat | --localsta | --localst | --locals) ac_prev=localstatedir ;; -localstatedir=* | --localstatedir=* | --localstatedi=* | --localstated=* \ | --localstate=* | --localstat=* | --localsta=* | --localst=* | --locals=*) localstatedir=$ac_optarg ;; -mandir | --mandir | --mandi | --mand | --man | --ma | --m) ac_prev=mandir ;; -mandir=* | --mandir=* | --mandi=* | --mand=* | --man=* | --ma=* | --m=*) mandir=$ac_optarg ;; -nfp | --nfp | --nf) # Obsolete; use --without-fp. 
with_fp=no ;; -no-create | --no-create | --no-creat | --no-crea | --no-cre \ | --no-cr | --no-c | -n) no_create=yes ;; -no-recursion | --no-recursion | --no-recursio | --no-recursi \ | --no-recurs | --no-recur | --no-recu | --no-rec | --no-re | --no-r) no_recursion=yes ;; -oldincludedir | --oldincludedir | --oldincludedi | --oldincluded \ | --oldinclude | --oldinclud | --oldinclu | --oldincl | --oldinc \ | --oldin | --oldi | --old | --ol | --o) ac_prev=oldincludedir ;; -oldincludedir=* | --oldincludedir=* | --oldincludedi=* | --oldincluded=* \ | --oldinclude=* | --oldinclud=* | --oldinclu=* | --oldincl=* | --oldinc=* \ | --oldin=* | --oldi=* | --old=* | --ol=* | --o=*) oldincludedir=$ac_optarg ;; -prefix | --prefix | --prefi | --pref | --pre | --pr | --p) ac_prev=prefix ;; -prefix=* | --prefix=* | --prefi=* | --pref=* | --pre=* | --pr=* | --p=*) prefix=$ac_optarg ;; -program-prefix | --program-prefix | --program-prefi | --program-pref \ | --program-pre | --program-pr | --program-p) ac_prev=program_prefix ;; -program-prefix=* | --program-prefix=* | --program-prefi=* \ | --program-pref=* | --program-pre=* | --program-pr=* | --program-p=*) program_prefix=$ac_optarg ;; -program-suffix | --program-suffix | --program-suffi | --program-suff \ | --program-suf | --program-su | --program-s) ac_prev=program_suffix ;; -program-suffix=* | --program-suffix=* | --program-suffi=* \ | --program-suff=* | --program-suf=* | --program-su=* | --program-s=*) program_suffix=$ac_optarg ;; -program-transform-name | --program-transform-name \ | --program-transform-nam | --program-transform-na \ | --program-transform-n | --program-transform- \ | --program-transform | --program-transfor \ | --program-transfo | --program-transf \ | --program-trans | --program-tran \ | --progr-tra | --program-tr | --program-t) ac_prev=program_transform_name ;; -program-transform-name=* | --program-transform-name=* \ | --program-transform-nam=* | --program-transform-na=* \ | --program-transform-n=* | --program-transform-=* \ | --program-transform=* | --program-transfor=* \ | --program-transfo=* | --program-transf=* \ | --program-trans=* | --program-tran=* \ | --progr-tra=* | --program-tr=* | --program-t=*) program_transform_name=$ac_optarg ;; -pdfdir | --pdfdir | --pdfdi | --pdfd | --pdf | --pd) ac_prev=pdfdir ;; -pdfdir=* | --pdfdir=* | --pdfdi=* | --pdfd=* | --pdf=* | --pd=*) pdfdir=$ac_optarg ;; -psdir | --psdir | --psdi | --psd | --ps) ac_prev=psdir ;; -psdir=* | --psdir=* | --psdi=* | --psd=* | --ps=*) psdir=$ac_optarg ;; -q | -quiet | --quiet | --quie | --qui | --qu | --q \ | -silent | --silent | --silen | --sile | --sil) silent=yes ;; -sbindir | --sbindir | --sbindi | --sbind | --sbin | --sbi | --sb) ac_prev=sbindir ;; -sbindir=* | --sbindir=* | --sbindi=* | --sbind=* | --sbin=* \ | --sbi=* | --sb=*) sbindir=$ac_optarg ;; -sharedstatedir | --sharedstatedir | --sharedstatedi \ | --sharedstated | --sharedstate | --sharedstat | --sharedsta \ | --sharedst | --shareds | --shared | --share | --shar \ | --sha | --sh) ac_prev=sharedstatedir ;; -sharedstatedir=* | --sharedstatedir=* | --sharedstatedi=* \ | --sharedstated=* | --sharedstate=* | --sharedstat=* | --sharedsta=* \ | --sharedst=* | --shareds=* | --shared=* | --share=* | --shar=* \ | --sha=* | --sh=*) sharedstatedir=$ac_optarg ;; -site | --site | --sit) ac_prev=site ;; -site=* | --site=* | --sit=*) site=$ac_optarg ;; -srcdir | --srcdir | --srcdi | --srcd | --src | --sr) ac_prev=srcdir ;; -srcdir=* | --srcdir=* | --srcdi=* | --srcd=* | --src=* | --sr=*) srcdir=$ac_optarg ;; 
-sysconfdir | --sysconfdir | --sysconfdi | --sysconfd | --sysconf \ | --syscon | --sysco | --sysc | --sys | --sy) ac_prev=sysconfdir ;; -sysconfdir=* | --sysconfdir=* | --sysconfdi=* | --sysconfd=* | --sysconf=* \ | --syscon=* | --sysco=* | --sysc=* | --sys=* | --sy=*) sysconfdir=$ac_optarg ;; -target | --target | --targe | --targ | --tar | --ta | --t) ac_prev=target_alias ;; -target=* | --target=* | --targe=* | --targ=* | --tar=* | --ta=* | --t=*) target_alias=$ac_optarg ;; -v | -verbose | --verbose | --verbos | --verbo | --verb) verbose=yes ;; -version | --version | --versio | --versi | --vers | -V) ac_init_version=: ;; -with-* | --with-*) ac_useropt=`expr "x$ac_option" : 'x-*with-\([^=]*\)'` # Reject names that are not valid shell variable names. expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null && as_fn_error $? "invalid package name: $ac_useropt" ac_useropt_orig=$ac_useropt ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'` case $ac_user_opts in *" "with_$ac_useropt" "*) ;; *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--with-$ac_useropt_orig" ac_unrecognized_sep=', ';; esac eval with_$ac_useropt=\$ac_optarg ;; -without-* | --without-*) ac_useropt=`expr "x$ac_option" : 'x-*without-\(.*\)'` # Reject names that are not valid shell variable names. expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null && as_fn_error $? "invalid package name: $ac_useropt" ac_useropt_orig=$ac_useropt ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'` case $ac_user_opts in *" "with_$ac_useropt" "*) ;; *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--without-$ac_useropt_orig" ac_unrecognized_sep=', ';; esac eval with_$ac_useropt=no ;; --x) # Obsolete; use --with-x. with_x=yes ;; -x-includes | --x-includes | --x-include | --x-includ | --x-inclu \ | --x-incl | --x-inc | --x-in | --x-i) ac_prev=x_includes ;; -x-includes=* | --x-includes=* | --x-include=* | --x-includ=* | --x-inclu=* \ | --x-incl=* | --x-inc=* | --x-in=* | --x-i=*) x_includes=$ac_optarg ;; -x-libraries | --x-libraries | --x-librarie | --x-librari \ | --x-librar | --x-libra | --x-libr | --x-lib | --x-li | --x-l) ac_prev=x_libraries ;; -x-libraries=* | --x-libraries=* | --x-librarie=* | --x-librari=* \ | --x-librar=* | --x-libra=* | --x-libr=* | --x-lib=* | --x-li=* | --x-l=*) x_libraries=$ac_optarg ;; -*) as_fn_error $? "unrecognized option: \`$ac_option' Try \`$0 --help' for more information" ;; *=*) ac_envvar=`expr "x$ac_option" : 'x\([^=]*\)='` # Reject names that are not valid shell variable names. case $ac_envvar in #( '' | [0-9]* | *[!_$as_cr_alnum]* ) as_fn_error $? "invalid variable name: \`$ac_envvar'" ;; esac eval $ac_envvar=\$ac_optarg export $ac_envvar ;; *) # FIXME: should be removed in autoconf 3.0. $as_echo "$as_me: WARNING: you should use --build, --host, --target" >&2 expr "x$ac_option" : ".*[^-._$as_cr_alnum]" >/dev/null && $as_echo "$as_me: WARNING: invalid host type: $ac_option" >&2 : "${build_alias=$ac_option} ${host_alias=$ac_option} ${target_alias=$ac_option}" ;; esac done if test -n "$ac_prev"; then ac_option=--`echo $ac_prev | sed 's/_/-/g'` as_fn_error $? "missing argument to $ac_option" fi if test -n "$ac_unrecognized_opts"; then case $enable_option_checking in no) ;; fatal) as_fn_error $? "unrecognized options: $ac_unrecognized_opts" ;; *) $as_echo "$as_me: WARNING: unrecognized options: $ac_unrecognized_opts" >&2 ;; esac fi # Check all directory arguments for consistency. 
for ac_var in exec_prefix prefix bindir sbindir libexecdir datarootdir \ datadir sysconfdir sharedstatedir localstatedir includedir \ oldincludedir docdir infodir htmldir dvidir pdfdir psdir \ libdir localedir mandir do eval ac_val=\$$ac_var # Remove trailing slashes. case $ac_val in */ ) ac_val=`expr "X$ac_val" : 'X\(.*[^/]\)' \| "X$ac_val" : 'X\(.*\)'` eval $ac_var=\$ac_val;; esac # Be sure to have absolute directory names. case $ac_val in [\\/$]* | ?:[\\/]* ) continue;; NONE | '' ) case $ac_var in *prefix ) continue;; esac;; esac as_fn_error $? "expected an absolute directory name for --$ac_var: $ac_val" done # There might be people who depend on the old broken behavior: `$host' # used to hold the argument of --host etc. # FIXME: To remove some day. build=$build_alias host=$host_alias target=$target_alias # FIXME: To remove some day. if test "x$host_alias" != x; then if test "x$build_alias" = x; then cross_compiling=maybe elif test "x$build_alias" != "x$host_alias"; then cross_compiling=yes fi fi ac_tool_prefix= test -n "$host_alias" && ac_tool_prefix=$host_alias- test "$silent" = yes && exec 6>/dev/null ac_pwd=`pwd` && test -n "$ac_pwd" && ac_ls_di=`ls -di .` && ac_pwd_ls_di=`cd "$ac_pwd" && ls -di .` || as_fn_error $? "working directory cannot be determined" test "X$ac_ls_di" = "X$ac_pwd_ls_di" || as_fn_error $? "pwd does not report name of working directory" # Find the source files, if location was not specified. if test -z "$srcdir"; then ac_srcdir_defaulted=yes # Try the directory containing this script, then the parent directory. ac_confdir=`$as_dirname -- "$as_myself" || $as_expr X"$as_myself" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ X"$as_myself" : 'X\(//\)[^/]' \| \ X"$as_myself" : 'X\(//\)$' \| \ X"$as_myself" : 'X\(/\)' \| . 2>/dev/null || $as_echo X"$as_myself" | sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ s//\1/ q } /^X\(\/\/\)[^/].*/{ s//\1/ q } /^X\(\/\/\)$/{ s//\1/ q } /^X\(\/\).*/{ s//\1/ q } s/.*/./; q'` srcdir=$ac_confdir if test ! -r "$srcdir/$ac_unique_file"; then srcdir=.. fi else ac_srcdir_defaulted=no fi if test ! -r "$srcdir/$ac_unique_file"; then test "$ac_srcdir_defaulted" = yes && srcdir="$ac_confdir or .." as_fn_error $? "cannot find sources ($ac_unique_file) in $srcdir" fi ac_msg="sources are in $srcdir, but \`cd $srcdir' does not work" ac_abs_confdir=`( cd "$srcdir" && test -r "./$ac_unique_file" || as_fn_error $? "$ac_msg" pwd)` # When building in place, set srcdir=. if test "$ac_abs_confdir" = "$ac_pwd"; then srcdir=. fi # Remove unnecessary trailing slashes from srcdir. # Double slashes in file names in object file debugging info # mess up M-x gdb in Emacs. case $srcdir in */) srcdir=`expr "X$srcdir" : 'X\(.*[^/]\)' \| "X$srcdir" : 'X\(.*\)'`;; esac for ac_var in $ac_precious_vars; do eval ac_env_${ac_var}_set=\${${ac_var}+set} eval ac_env_${ac_var}_value=\$${ac_var} eval ac_cv_env_${ac_var}_set=\${${ac_var}+set} eval ac_cv_env_${ac_var}_value=\$${ac_var} done # # Report the --help message. # if test "$ac_init_help" = "long"; then # Omit some internal or obsolete options to make the list less imposing. # This message is too long to be a string in the A/UX 3.1 sh. cat <<_ACEOF \`configure' configures globus_gram_job_manager_scripts 7.3 to adapt to many kinds of systems. Usage: $0 [OPTION]... [VAR=VALUE]... To assign environment variables (e.g., CC, CFLAGS...), specify them as VAR=VALUE. See below for descriptions of some of the useful variables. Defaults for the options are specified in brackets. 
Configuration: -h, --help display this help and exit --help=short display options specific to this package --help=recursive display the short help of all the included packages -V, --version display version information and exit -q, --quiet, --silent do not print \`checking ...' messages --cache-file=FILE cache test results in FILE [disabled] -C, --config-cache alias for \`--cache-file=config.cache' -n, --no-create do not create output files --srcdir=DIR find the sources in DIR [configure dir or \`..'] Installation directories: --prefix=PREFIX install architecture-independent files in PREFIX [$ac_default_prefix] --exec-prefix=EPREFIX install architecture-dependent files in EPREFIX [PREFIX] By default, \`make install' will install all the files in \`$ac_default_prefix/bin', \`$ac_default_prefix/lib' etc. You can specify an installation prefix other than \`$ac_default_prefix' using \`--prefix', for instance \`--prefix=\$HOME'. For better control, use the options below. Fine tuning of the installation directories: --bindir=DIR user executables [EPREFIX/bin] --sbindir=DIR system admin executables [EPREFIX/sbin] --libexecdir=DIR program executables [EPREFIX/libexec] --sysconfdir=DIR read-only single-machine data [PREFIX/etc] --sharedstatedir=DIR modifiable architecture-independent data [PREFIX/com] --localstatedir=DIR modifiable single-machine data [PREFIX/var] --libdir=DIR object code libraries [EPREFIX/lib] --includedir=DIR C header files [PREFIX/include] --oldincludedir=DIR C header files for non-gcc [/usr/include] --datarootdir=DIR read-only arch.-independent data root [PREFIX/share] --datadir=DIR read-only architecture-independent data [DATAROOTDIR] --infodir=DIR info documentation [DATAROOTDIR/info] --localedir=DIR locale-dependent data [DATAROOTDIR/locale] --mandir=DIR man documentation [DATAROOTDIR/man] --docdir=DIR documentation root [DATAROOTDIR/doc/globus_gram_job_manager_scripts] --htmldir=DIR html documentation [DOCDIR] --dvidir=DIR dvi documentation [DOCDIR] --pdfdir=DIR pdf documentation [DOCDIR] --psdir=DIR ps documentation [DOCDIR] _ACEOF cat <<\_ACEOF Program names: --program-prefix=PREFIX prepend PREFIX to installed program names --program-suffix=SUFFIX append SUFFIX to installed program names --program-transform-name=PROGRAM run sed PROGRAM on installed program names _ACEOF fi if test -n "$ac_init_help"; then case $ac_init_help in short | recursive ) echo "Configuration of globus_gram_job_manager_scripts 7.3:";; esac cat <<\_ACEOF Optional Features: --disable-option-checking ignore unrecognized --enable/--with options --disable-FEATURE do not include FEATURE (same as --enable-FEATURE=no) --enable-FEATURE[=ARG] include FEATURE [ARG=yes] --enable-silent-rules less verbose build output (undo: "make V=1") --disable-silent-rules verbose build output (undo: "make V=0") Optional Packages: --with-PACKAGE[=ARG] use PACKAGE [ARG=yes] --without-PACKAGE do not use PACKAGE (same as --with-PACKAGE=no) --with-perlmoduledir=DIR perl module directory [[PREFIX/lib/perl]] Report bugs to . _ACEOF ac_status=$? fi if test "$ac_init_help" = "recursive"; then # If there are subdirs, report their specific --help. for ac_dir in : $ac_subdirs_all; do test "x$ac_dir" = x: && continue test -d "$ac_dir" || { cd "$srcdir" && ac_pwd=`pwd` && srcdir=. && test -d "$ac_dir"; } || continue ac_builddir=. case "$ac_dir" in .) ac_dir_suffix= ac_top_builddir_sub=. ac_top_build_prefix= ;; *) ac_dir_suffix=/`$as_echo "$ac_dir" | sed 's|^\.[\\/]||'` # A ".." for each directory in $ac_dir_suffix. 
ac_top_builddir_sub=`$as_echo "$ac_dir_suffix" | sed 's|/[^\\/]*|/..|g;s|/||'` case $ac_top_builddir_sub in "") ac_top_builddir_sub=. ac_top_build_prefix= ;; *) ac_top_build_prefix=$ac_top_builddir_sub/ ;; esac ;; esac ac_abs_top_builddir=$ac_pwd ac_abs_builddir=$ac_pwd$ac_dir_suffix # for backward compatibility: ac_top_builddir=$ac_top_build_prefix case $srcdir in .) # We are building in place. ac_srcdir=. ac_top_srcdir=$ac_top_builddir_sub ac_abs_top_srcdir=$ac_pwd ;; [\\/]* | ?:[\\/]* ) # Absolute name. ac_srcdir=$srcdir$ac_dir_suffix; ac_top_srcdir=$srcdir ac_abs_top_srcdir=$srcdir ;; *) # Relative name. ac_srcdir=$ac_top_build_prefix$srcdir$ac_dir_suffix ac_top_srcdir=$ac_top_build_prefix$srcdir ac_abs_top_srcdir=$ac_pwd/$srcdir ;; esac ac_abs_srcdir=$ac_abs_top_srcdir$ac_dir_suffix cd "$ac_dir" || { ac_status=$?; continue; } # Check for guested configure. if test -f "$ac_srcdir/configure.gnu"; then echo && $SHELL "$ac_srcdir/configure.gnu" --help=recursive elif test -f "$ac_srcdir/configure"; then echo && $SHELL "$ac_srcdir/configure" --help=recursive else $as_echo "$as_me: WARNING: no configuration information is in $ac_dir" >&2 fi || ac_status=$? cd "$ac_pwd" || { ac_status=$?; break; } done fi test -n "$ac_init_help" && exit $ac_status if $ac_init_version; then cat <<\_ACEOF globus_gram_job_manager_scripts configure 7.3 generated by GNU Autoconf 2.69 Copyright (C) 2012 Free Software Foundation, Inc. This configure script is free software; the Free Software Foundation gives unlimited permission to copy, distribute and modify it. _ACEOF exit fi ## ------------------------ ## ## Autoconf initialization. ## ## ------------------------ ## cat >config.log <<_ACEOF This file contains any messages produced by compilers while running configure, to aid debugging if configure makes a mistake. It was created by globus_gram_job_manager_scripts $as_me 7.3, which was generated by GNU Autoconf 2.69. Invocation command line was $ $0 $@ _ACEOF exec 5>>config.log { cat <<_ASUNAME ## --------- ## ## Platform. ## ## --------- ## hostname = `(hostname || uname -n) 2>/dev/null | sed 1q` uname -m = `(uname -m) 2>/dev/null || echo unknown` uname -r = `(uname -r) 2>/dev/null || echo unknown` uname -s = `(uname -s) 2>/dev/null || echo unknown` uname -v = `(uname -v) 2>/dev/null || echo unknown` /usr/bin/uname -p = `(/usr/bin/uname -p) 2>/dev/null || echo unknown` /bin/uname -X = `(/bin/uname -X) 2>/dev/null || echo unknown` /bin/arch = `(/bin/arch) 2>/dev/null || echo unknown` /usr/bin/arch -k = `(/usr/bin/arch -k) 2>/dev/null || echo unknown` /usr/convex/getsysinfo = `(/usr/convex/getsysinfo) 2>/dev/null || echo unknown` /usr/bin/hostinfo = `(/usr/bin/hostinfo) 2>/dev/null || echo unknown` /bin/machine = `(/bin/machine) 2>/dev/null || echo unknown` /usr/bin/oslevel = `(/usr/bin/oslevel) 2>/dev/null || echo unknown` /bin/universe = `(/bin/universe) 2>/dev/null || echo unknown` _ASUNAME as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. $as_echo "PATH: $as_dir" done IFS=$as_save_IFS } >&5 cat >&5 <<_ACEOF ## ----------- ## ## Core tests. ## ## ----------- ## _ACEOF # Keep a trace of the command line. # Strip out --no-create and --no-recursion so they do not pile up. # Strip out --silent because we don't want to record it for future runs. # Also quote any args containing shell meta-characters. # Make two passes to allow for proper duplicate-argument suppression. 
ac_configure_args= ac_configure_args0= ac_configure_args1= ac_must_keep_next=false for ac_pass in 1 2 do for ac_arg do case $ac_arg in -no-create | --no-c* | -n | -no-recursion | --no-r*) continue ;; -q | -quiet | --quiet | --quie | --qui | --qu | --q \ | -silent | --silent | --silen | --sile | --sil) continue ;; *\'*) ac_arg=`$as_echo "$ac_arg" | sed "s/'/'\\\\\\\\''/g"` ;; esac case $ac_pass in 1) as_fn_append ac_configure_args0 " '$ac_arg'" ;; 2) as_fn_append ac_configure_args1 " '$ac_arg'" if test $ac_must_keep_next = true; then ac_must_keep_next=false # Got value, back to normal. else case $ac_arg in *=* | --config-cache | -C | -disable-* | --disable-* \ | -enable-* | --enable-* | -gas | --g* | -nfp | --nf* \ | -q | -quiet | --q* | -silent | --sil* | -v | -verb* \ | -with-* | --with-* | -without-* | --without-* | --x) case "$ac_configure_args0 " in "$ac_configure_args1"*" '$ac_arg' "* ) continue ;; esac ;; -* ) ac_must_keep_next=true ;; esac fi as_fn_append ac_configure_args " '$ac_arg'" ;; esac done done { ac_configure_args0=; unset ac_configure_args0;} { ac_configure_args1=; unset ac_configure_args1;} # When interrupted or exit'd, cleanup temporary files, and complete # config.log. We remove comments because anyway the quotes in there # would cause problems or look ugly. # WARNING: Use '\'' to represent an apostrophe within the trap. # WARNING: Do not start the trap code with a newline, due to a FreeBSD 4.0 bug. trap 'exit_status=$? # Save into config.log some information that might help in debugging. { echo $as_echo "## ---------------- ## ## Cache variables. ## ## ---------------- ##" echo # The following way of writing the cache mishandles newlines in values, ( for ac_var in `(set) 2>&1 | sed -n '\''s/^\([a-zA-Z_][a-zA-Z0-9_]*\)=.*/\1/p'\''`; do eval ac_val=\$$ac_var case $ac_val in #( *${as_nl}*) case $ac_var in #( *_cv_*) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: cache variable $ac_var contains a newline" >&5 $as_echo "$as_me: WARNING: cache variable $ac_var contains a newline" >&2;} ;; esac case $ac_var in #( _ | IFS | as_nl) ;; #( BASH_ARGV | BASH_SOURCE) eval $ac_var= ;; #( *) { eval $ac_var=; unset $ac_var;} ;; esac ;; esac done (set) 2>&1 | case $as_nl`(ac_space='\'' '\''; set) 2>&1` in #( *${as_nl}ac_space=\ *) sed -n \ "s/'\''/'\''\\\\'\'''\''/g; s/^\\([_$as_cr_alnum]*_cv_[_$as_cr_alnum]*\\)=\\(.*\\)/\\1='\''\\2'\''/p" ;; #( *) sed -n "/^[_$as_cr_alnum]*_cv_[_$as_cr_alnum]*=/p" ;; esac | sort ) echo $as_echo "## ----------------- ## ## Output variables. ## ## ----------------- ##" echo for ac_var in $ac_subst_vars do eval ac_val=\$$ac_var case $ac_val in *\'\''*) ac_val=`$as_echo "$ac_val" | sed "s/'\''/'\''\\\\\\\\'\'''\''/g"`;; esac $as_echo "$ac_var='\''$ac_val'\''" done | sort echo if test -n "$ac_subst_files"; then $as_echo "## ------------------- ## ## File substitutions. ## ## ------------------- ##" echo for ac_var in $ac_subst_files do eval ac_val=\$$ac_var case $ac_val in *\'\''*) ac_val=`$as_echo "$ac_val" | sed "s/'\''/'\''\\\\\\\\'\'''\''/g"`;; esac $as_echo "$ac_var='\''$ac_val'\''" done | sort echo fi if test -s confdefs.h; then $as_echo "## ----------- ## ## confdefs.h. 
## ## ----------- ##" echo cat confdefs.h echo fi test "$ac_signal" != 0 && $as_echo "$as_me: caught signal $ac_signal" $as_echo "$as_me: exit $exit_status" } >&5 rm -f core *.core core.conftest.* && rm -f -r conftest* confdefs* conf$$* $ac_clean_files && exit $exit_status ' 0 for ac_signal in 1 2 13 15; do trap 'ac_signal='$ac_signal'; as_fn_exit 1' $ac_signal done ac_signal=0 # confdefs.h avoids OS command line length limits that DEFS can exceed. rm -f -r conftest* confdefs.h $as_echo "/* confdefs.h */" > confdefs.h # Predefined preprocessor variables. cat >>confdefs.h <<_ACEOF #define PACKAGE_NAME "$PACKAGE_NAME" _ACEOF cat >>confdefs.h <<_ACEOF #define PACKAGE_TARNAME "$PACKAGE_TARNAME" _ACEOF cat >>confdefs.h <<_ACEOF #define PACKAGE_VERSION "$PACKAGE_VERSION" _ACEOF cat >>confdefs.h <<_ACEOF #define PACKAGE_STRING "$PACKAGE_STRING" _ACEOF cat >>confdefs.h <<_ACEOF #define PACKAGE_BUGREPORT "$PACKAGE_BUGREPORT" _ACEOF cat >>confdefs.h <<_ACEOF #define PACKAGE_URL "$PACKAGE_URL" _ACEOF # Let the site file select an alternate cache file if it wants to. # Prefer an explicitly selected file to automatically selected ones. ac_site_file1=NONE ac_site_file2=NONE if test -n "$CONFIG_SITE"; then # We do not want a PATH search for config.site. case $CONFIG_SITE in #(( -*) ac_site_file1=./$CONFIG_SITE;; */*) ac_site_file1=$CONFIG_SITE;; *) ac_site_file1=./$CONFIG_SITE;; esac elif test "x$prefix" != xNONE; then ac_site_file1=$prefix/share/config.site ac_site_file2=$prefix/etc/config.site else ac_site_file1=$ac_default_prefix/share/config.site ac_site_file2=$ac_default_prefix/etc/config.site fi for ac_site_file in "$ac_site_file1" "$ac_site_file2" do test "x$ac_site_file" = xNONE && continue if test /dev/null != "$ac_site_file" && test -r "$ac_site_file"; then { $as_echo "$as_me:${as_lineno-$LINENO}: loading site script $ac_site_file" >&5 $as_echo "$as_me: loading site script $ac_site_file" >&6;} sed 's/^/| /' "$ac_site_file" >&5 . "$ac_site_file" \ || { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;} as_fn_error $? "failed to load site script $ac_site_file See \`config.log' for more details" "$LINENO" 5; } fi done if test -r "$cache_file"; then # Some versions of bash will fail to source /dev/null (special files # actually), so we avoid doing that. DJGPP emulates it as a regular file. if test /dev/null != "$cache_file" && test -f "$cache_file"; then { $as_echo "$as_me:${as_lineno-$LINENO}: loading cache $cache_file" >&5 $as_echo "$as_me: loading cache $cache_file" >&6;} case $cache_file in [\\/]* | ?:[\\/]* ) . "$cache_file";; *) . "./$cache_file";; esac fi else { $as_echo "$as_me:${as_lineno-$LINENO}: creating cache $cache_file" >&5 $as_echo "$as_me: creating cache $cache_file" >&6;} >$cache_file fi # Check that the precious variables saved in the cache have kept the same # value. 
ac_cache_corrupted=false for ac_var in $ac_precious_vars; do eval ac_old_set=\$ac_cv_env_${ac_var}_set eval ac_new_set=\$ac_env_${ac_var}_set eval ac_old_val=\$ac_cv_env_${ac_var}_value eval ac_new_val=\$ac_env_${ac_var}_value case $ac_old_set,$ac_new_set in set,) { $as_echo "$as_me:${as_lineno-$LINENO}: error: \`$ac_var' was set to \`$ac_old_val' in the previous run" >&5 $as_echo "$as_me: error: \`$ac_var' was set to \`$ac_old_val' in the previous run" >&2;} ac_cache_corrupted=: ;; ,set) { $as_echo "$as_me:${as_lineno-$LINENO}: error: \`$ac_var' was not set in the previous run" >&5 $as_echo "$as_me: error: \`$ac_var' was not set in the previous run" >&2;} ac_cache_corrupted=: ;; ,);; *) if test "x$ac_old_val" != "x$ac_new_val"; then # differences in whitespace do not lead to failure. ac_old_val_w=`echo x $ac_old_val` ac_new_val_w=`echo x $ac_new_val` if test "$ac_old_val_w" != "$ac_new_val_w"; then { $as_echo "$as_me:${as_lineno-$LINENO}: error: \`$ac_var' has changed since the previous run:" >&5 $as_echo "$as_me: error: \`$ac_var' has changed since the previous run:" >&2;} ac_cache_corrupted=: else { $as_echo "$as_me:${as_lineno-$LINENO}: warning: ignoring whitespace changes in \`$ac_var' since the previous run:" >&5 $as_echo "$as_me: warning: ignoring whitespace changes in \`$ac_var' since the previous run:" >&2;} eval $ac_var=\$ac_old_val fi { $as_echo "$as_me:${as_lineno-$LINENO}: former value: \`$ac_old_val'" >&5 $as_echo "$as_me: former value: \`$ac_old_val'" >&2;} { $as_echo "$as_me:${as_lineno-$LINENO}: current value: \`$ac_new_val'" >&5 $as_echo "$as_me: current value: \`$ac_new_val'" >&2;} fi;; esac # Pass precious variables to config.status. if test "$ac_new_set" = set; then case $ac_new_val in *\'*) ac_arg=$ac_var=`$as_echo "$ac_new_val" | sed "s/'/'\\\\\\\\''/g"` ;; *) ac_arg=$ac_var=$ac_new_val ;; esac case " $ac_configure_args " in *" '$ac_arg' "*) ;; # Avoid dups. Use of quotes ensures accuracy. *) as_fn_append ac_configure_args " '$ac_arg'" ;; esac fi done if $ac_cache_corrupted; then { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;} { $as_echo "$as_me:${as_lineno-$LINENO}: error: changes in the environment can compromise the build" >&5 $as_echo "$as_me: error: changes in the environment can compromise the build" >&2;} as_fn_error $? "run \`make distclean' and/or \`rm $cache_file' and start over" "$LINENO" 5 fi ## -------------------- ## ## Main body of script. ## ## -------------------- ## ac_ext=c ac_cpp='$CPP $CPPFLAGS' ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_c_compiler_gnu MAJOR_VERSION=${PACKAGE_VERSION%%.*} MINOR_VERSION=${PACKAGE_VERSION##*.} AGE_VERSION=3 PACKAGE_DEPS="" ac_aux_dir= for ac_dir in build-aux "$srcdir"/build-aux; do if test -f "$ac_dir/install-sh"; then ac_aux_dir=$ac_dir ac_install_sh="$ac_aux_dir/install-sh -c" break elif test -f "$ac_dir/install.sh"; then ac_aux_dir=$ac_dir ac_install_sh="$ac_aux_dir/install.sh -c" break elif test -f "$ac_dir/shtool"; then ac_aux_dir=$ac_dir ac_install_sh="$ac_aux_dir/shtool install -c" break fi done if test -z "$ac_aux_dir"; then as_fn_error $? "cannot find install-sh, install.sh, or shtool in build-aux \"$srcdir\"/build-aux" "$LINENO" 5 fi # These three variables are undocumented and unsupported, # and are intended to be withdrawn in a future Autoconf release. 
# They can cause serious problems if a builder's source tree is in a directory # whose full name contains unusual characters. ac_config_guess="$SHELL $ac_aux_dir/config.guess" # Please don't use this var. ac_config_sub="$SHELL $ac_aux_dir/config.sub" # Please don't use this var. ac_configure="$SHELL $ac_aux_dir/configure" # Please don't use this var. am__api_version='1.13' # Find a good install program. We prefer a C program (faster), # so one script is as good as another. But avoid the broken or # incompatible versions: # SysV /etc/install, /usr/sbin/install # SunOS /usr/etc/install # IRIX /sbin/install # AIX /bin/install # AmigaOS /C/install, which installs bootblocks on floppy discs # AIX 4 /usr/bin/installbsd, which doesn't work without a -g flag # AFS /usr/afsws/bin/install, which mishandles nonexistent args # SVR4 /usr/ucb/install, which tries to use the nonexistent group "staff" # OS/2's system install, which has a completely different semantic # ./install, which can be erroneously created by make from ./install.sh. # Reject install programs that cannot install multiple files. { $as_echo "$as_me:${as_lineno-$LINENO}: checking for a BSD-compatible install" >&5 $as_echo_n "checking for a BSD-compatible install... " >&6; } if test -z "$INSTALL"; then if ${ac_cv_path_install+:} false; then : $as_echo_n "(cached) " >&6 else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. # Account for people who put trailing slashes in PATH elements. case $as_dir/ in #(( ./ | .// | /[cC]/* | \ /etc/* | /usr/sbin/* | /usr/etc/* | /sbin/* | /usr/afsws/bin/* | \ ?:[\\/]os2[\\/]install[\\/]* | ?:[\\/]OS2[\\/]INSTALL[\\/]* | \ /usr/ucb/* ) ;; *) # OSF1 and SCO ODT 3.0 have their own names for install. # Don't use installbsd from OSF since it installs stuff as root # by default. for ac_prog in ginstall scoinst install; do for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_prog$ac_exec_ext"; then if test $ac_prog = install && grep dspmsg "$as_dir/$ac_prog$ac_exec_ext" >/dev/null 2>&1; then # AIX install. It has an incompatible calling convention. : elif test $ac_prog = install && grep pwplus "$as_dir/$ac_prog$ac_exec_ext" >/dev/null 2>&1; then # program-specific install script used by HP pwplus--don't use. : else rm -rf conftest.one conftest.two conftest.dir echo one > conftest.one echo two > conftest.two mkdir conftest.dir if "$as_dir/$ac_prog$ac_exec_ext" -c conftest.one conftest.two "`pwd`/conftest.dir" && test -s conftest.one && test -s conftest.two && test -s conftest.dir/conftest.one && test -s conftest.dir/conftest.two then ac_cv_path_install="$as_dir/$ac_prog$ac_exec_ext -c" break 3 fi fi fi done done ;; esac done IFS=$as_save_IFS rm -rf conftest.one conftest.two conftest.dir fi if test "${ac_cv_path_install+set}" = set; then INSTALL=$ac_cv_path_install else # As a last resort, use the slow shell script. Don't cache a # value for INSTALL within a source directory, because that will # break other packages using the cache if that directory is # removed, or if the value is a relative name. INSTALL=$ac_install_sh fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $INSTALL" >&5 $as_echo "$INSTALL" >&6; } # Use test -z because SunOS4 sh mishandles braces in ${var-val}. # It thinks the first close brace ends the variable substitution. 
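# (Descriptive sketch of the construct being avoided: a default expansion
#  such as  INSTALL_DATA=${INSTALL_DATA-'${INSTALL} -m 644'}  carries a
#  literal close brace inside the value, which SunOS4 sh would read as the
#  end of the substitution; the explicit  test -z ... &&  assignments below
#  sidestep that parser bug.)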
test -z "$INSTALL_PROGRAM" && INSTALL_PROGRAM='${INSTALL}' test -z "$INSTALL_SCRIPT" && INSTALL_SCRIPT='${INSTALL}' test -z "$INSTALL_DATA" && INSTALL_DATA='${INSTALL} -m 644' { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether build environment is sane" >&5 $as_echo_n "checking whether build environment is sane... " >&6; } # Reject unsafe characters in $srcdir or the absolute working directory # name. Accept space and tab only in the latter. am_lf=' ' case `pwd` in *[\\\"\#\$\&\'\`$am_lf]*) as_fn_error $? "unsafe absolute working directory name" "$LINENO" 5;; esac case $srcdir in *[\\\"\#\$\&\'\`$am_lf\ \ ]*) as_fn_error $? "unsafe srcdir value: '$srcdir'" "$LINENO" 5;; esac # Do 'set' in a subshell so we don't clobber the current shell's # arguments. Must try -L first in case configure is actually a # symlink; some systems play weird games with the mod time of symlinks # (eg FreeBSD returns the mod time of the symlink's containing # directory). if ( am_has_slept=no for am_try in 1 2; do echo "timestamp, slept: $am_has_slept" > conftest.file set X `ls -Lt "$srcdir/configure" conftest.file 2> /dev/null` if test "$*" = "X"; then # -L didn't work. set X `ls -t "$srcdir/configure" conftest.file` fi if test "$*" != "X $srcdir/configure conftest.file" \ && test "$*" != "X conftest.file $srcdir/configure"; then # If neither matched, then we have a broken ls. This can happen # if, for instance, CONFIG_SHELL is bash and it inherits a # broken ls alias from the environment. This has actually # happened. Such a system could not be considered "sane". as_fn_error $? "ls -t appears to fail. Make sure there is not a broken alias in your environment" "$LINENO" 5 fi if test "$2" = conftest.file || test $am_try -eq 2; then break fi # Just in case. sleep 1 am_has_slept=yes done test "$2" = conftest.file ) then # Ok. : else as_fn_error $? "newly created file is older than distributed files! Check your system clock" "$LINENO" 5 fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } # If we didn't sleep, we still need to ensure time stamps of config.status and # generated files are strictly newer. am_sleep_pid= if grep 'slept: no' conftest.file >/dev/null 2>&1; then ( sleep 1 ) & am_sleep_pid=$! fi rm -f conftest.file test "$program_prefix" != NONE && program_transform_name="s&^&$program_prefix&;$program_transform_name" # Use a double $ so make ignores it. test "$program_suffix" != NONE && program_transform_name="s&\$&$program_suffix&;$program_transform_name" # Double any \ or $. # By default was `s,x,x', remove it if useless. ac_script='s/[\\$]/&&/g;s/;s,x,x,$//' program_transform_name=`$as_echo "$program_transform_name" | sed "$ac_script"` # expand $ac_aux_dir to an absolute path am_aux_dir=`cd $ac_aux_dir && pwd` if test x"${MISSING+set}" != xset; then case $am_aux_dir in *\ * | *\ *) MISSING="\${SHELL} \"$am_aux_dir/missing\"" ;; *) MISSING="\${SHELL} $am_aux_dir/missing" ;; esac fi # Use eval to expand $SHELL if eval "$MISSING --is-lightweight"; then am_missing_run="$MISSING " else am_missing_run= { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: 'missing' script is too old or missing" >&5 $as_echo "$as_me: WARNING: 'missing' script is too old or missing" >&2;} fi if test x"${install_sh}" != xset; then case $am_aux_dir in *\ * | *\ *) install_sh="\${SHELL} '$am_aux_dir/install-sh'" ;; *) install_sh="\${SHELL} $am_aux_dir/install-sh" esac fi # Installed binaries are usually stripped using 'strip' when the user # run "make install-strip". 
However 'strip' might not be the right # tool to use in cross-compilation environments, therefore Automake # will honor the 'STRIP' environment variable to overrule this program. if test "$cross_compiling" != no; then if test -n "$ac_tool_prefix"; then # Extract the first word of "${ac_tool_prefix}strip", so it can be a program name with args. set dummy ${ac_tool_prefix}strip; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_STRIP+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$STRIP"; then ac_cv_prog_STRIP="$STRIP" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_STRIP="${ac_tool_prefix}strip" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi STRIP=$ac_cv_prog_STRIP if test -n "$STRIP"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $STRIP" >&5 $as_echo "$STRIP" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi fi if test -z "$ac_cv_prog_STRIP"; then ac_ct_STRIP=$STRIP # Extract the first word of "strip", so it can be a program name with args. set dummy strip; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_ac_ct_STRIP+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$ac_ct_STRIP"; then ac_cv_prog_ac_ct_STRIP="$ac_ct_STRIP" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_ac_ct_STRIP="strip" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi ac_ct_STRIP=$ac_cv_prog_ac_ct_STRIP if test -n "$ac_ct_STRIP"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_STRIP" >&5 $as_echo "$ac_ct_STRIP" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi if test "x$ac_ct_STRIP" = x; then STRIP=":" else case $cross_compiling:$ac_tool_warned in yes:) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 $as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ac_tool_warned=yes ;; esac STRIP=$ac_ct_STRIP fi else STRIP="$ac_cv_prog_STRIP" fi fi INSTALL_STRIP_PROGRAM="\$(install_sh) -c -s" { $as_echo "$as_me:${as_lineno-$LINENO}: checking for a thread-safe mkdir -p" >&5 $as_echo_n "checking for a thread-safe mkdir -p... " >&6; } if test -z "$MKDIR_P"; then if ${ac_cv_path_mkdir+:} false; then : $as_echo_n "(cached) " >&6 else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH$PATH_SEPARATOR/opt/sfw/bin do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_prog in mkdir gmkdir; do for ac_exec_ext in '' $ac_executable_extensions; do as_fn_executable_p "$as_dir/$ac_prog$ac_exec_ext" || continue case `"$as_dir/$ac_prog$ac_exec_ext" --version 2>&1` in #( 'mkdir (GNU coreutils) '* | \ 'mkdir (coreutils) '* | \ 'mkdir (fileutils) '4.1*) ac_cv_path_mkdir=$as_dir/$ac_prog$ac_exec_ext break 3;; esac done done done IFS=$as_save_IFS fi test -d ./--version && rmdir ./--version if test "${ac_cv_path_mkdir+set}" = set; then MKDIR_P="$ac_cv_path_mkdir -p" else # As a last resort, use the slow shell script. Don't cache a # value for MKDIR_P within a source directory, because that will # break other packages using the cache if that directory is # removed, or if the value is a relative name. MKDIR_P="$ac_install_sh -d" fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $MKDIR_P" >&5 $as_echo "$MKDIR_P" >&6; } for ac_prog in gawk mawk nawk awk do # Extract the first word of "$ac_prog", so it can be a program name with args. set dummy $ac_prog; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_AWK+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$AWK"; then ac_cv_prog_AWK="$AWK" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_AWK="$ac_prog" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi AWK=$ac_cv_prog_AWK if test -n "$AWK"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $AWK" >&5 $as_echo "$AWK" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi test -n "$AWK" && break done { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether ${MAKE-make} sets \$(MAKE)" >&5 $as_echo_n "checking whether ${MAKE-make} sets \$(MAKE)... " >&6; } set x ${MAKE-make} ac_make=`$as_echo "$2" | sed 's/+/p/g; s/[^a-zA-Z0-9_]/_/g'` if eval \${ac_cv_prog_make_${ac_make}_set+:} false; then : $as_echo_n "(cached) " >&6 else cat >conftest.make <<\_ACEOF SHELL = /bin/sh all: @echo '@@@%%%=$(MAKE)=@@@%%%' _ACEOF # GNU make sometimes prints "make[1]: Entering ...", which would confuse us. case `${MAKE-make} -f conftest.make 2>/dev/null` in *@@@%%%=?*=@@@%%%*) eval ac_cv_prog_make_${ac_make}_set=yes;; *) eval ac_cv_prog_make_${ac_make}_set=no;; esac rm -f conftest.make fi if eval test \$ac_cv_prog_make_${ac_make}_set = yes; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } SET_MAKE= else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } SET_MAKE="MAKE=${MAKE-make}" fi rm -rf .tst 2>/dev/null mkdir .tst 2>/dev/null if test -d .tst; then am__leading_dot=. else am__leading_dot=_ fi rmdir .tst 2>/dev/null # Check whether --enable-silent-rules was given. if test "${enable_silent_rules+set}" = set; then : enableval=$enable_silent_rules; fi case $enable_silent_rules in # ((( yes) AM_DEFAULT_VERBOSITY=0;; no) AM_DEFAULT_VERBOSITY=1;; *) AM_DEFAULT_VERBOSITY=1;; esac am_make=${MAKE-make} { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $am_make supports nested variables" >&5 $as_echo_n "checking whether $am_make supports nested variables... 
" >&6; } if ${am_cv_make_support_nested_variables+:} false; then : $as_echo_n "(cached) " >&6 else if $as_echo 'TRUE=$(BAR$(V)) BAR0=false BAR1=true V=1 am__doit: @$(TRUE) .PHONY: am__doit' | $am_make -f - >/dev/null 2>&1; then am_cv_make_support_nested_variables=yes else am_cv_make_support_nested_variables=no fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $am_cv_make_support_nested_variables" >&5 $as_echo "$am_cv_make_support_nested_variables" >&6; } if test $am_cv_make_support_nested_variables = yes; then AM_V='$(V)' AM_DEFAULT_V='$(AM_DEFAULT_VERBOSITY)' else AM_V=$AM_DEFAULT_VERBOSITY AM_DEFAULT_V=$AM_DEFAULT_VERBOSITY fi AM_BACKSLASH='\' if test "`cd $srcdir && pwd`" != "`pwd`"; then # Use -I$(srcdir) only when $(srcdir) != ., so that make's output # is not polluted with repeated "-I." am__isrc=' -I$(srcdir)' # test to see if srcdir already configured if test -f $srcdir/config.status; then as_fn_error $? "source directory already configured; run \"make distclean\" there first" "$LINENO" 5 fi fi # test whether we have cygpath if test -z "$CYGPATH_W"; then if (cygpath --version) >/dev/null 2>/dev/null; then CYGPATH_W='cygpath -w' else CYGPATH_W=echo fi fi # Define the identity of the package. PACKAGE='globus_gram_job_manager_scripts' VERSION='7.3' cat >>confdefs.h <<_ACEOF #define PACKAGE "$PACKAGE" _ACEOF cat >>confdefs.h <<_ACEOF #define VERSION "$VERSION" _ACEOF # Some tools Automake needs. ACLOCAL=${ACLOCAL-"${am_missing_run}aclocal-${am__api_version}"} AUTOCONF=${AUTOCONF-"${am_missing_run}autoconf"} AUTOMAKE=${AUTOMAKE-"${am_missing_run}automake-${am__api_version}"} AUTOHEADER=${AUTOHEADER-"${am_missing_run}autoheader"} MAKEINFO=${MAKEINFO-"${am_missing_run}makeinfo"} # For better backward compatibility. To be removed once Automake 1.9.x # dies out for good. For more background, see: # # mkdir_p='$(MKDIR_P)' # We need awk for the "check" target. The system "awk" is bad on # some platforms. # Always define AMTAR for backward compatibility. Yes, it's still used # in the wild :-( We should find a proper way to deprecate it ... AMTAR='$${TAR-tar}' # We'll loop over all known methods to create a tar archive until one works. _am_tools='gnutar pax cpio none' { $as_echo "$as_me:${as_lineno-$LINENO}: checking how to create a pax tar archive" >&5 $as_echo_n "checking how to create a pax tar archive... " >&6; } # Go ahead even if we have the value already cached. We do so because we # need to set the values for the 'am__tar' and 'am__untar' variables. _am_tools=${am_cv_prog_tar_pax-$_am_tools} for _am_tool in $_am_tools; do case $_am_tool in gnutar) for _am_tar in tar gnutar gtar; do { echo "$as_me:$LINENO: $_am_tar --version" >&5 ($_am_tar --version) >&5 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } && break done am__tar="$_am_tar --format=posix -chf - "'"$$tardir"' am__tar_="$_am_tar --format=posix -chf - "'"$tardir"' am__untar="$_am_tar -xf -" ;; plaintar) # Must skip GNU tar: if it does not support --format= it doesn't create # ustar tarball either. (tar --version) >/dev/null 2>&1 && continue am__tar='tar chf - "$$tardir"' am__tar_='tar chf - "$tardir"' am__untar='tar xf -' ;; pax) am__tar='pax -L -x pax -w "$$tardir"' am__tar_='pax -L -x pax -w "$tardir"' am__untar='pax -r' ;; cpio) am__tar='find "$$tardir" -print | cpio -o -H pax -L' am__tar_='find "$tardir" -print | cpio -o -H pax -L' am__untar='cpio -i -H pax -d' ;; none) am__tar=false am__tar_=false am__untar=false ;; esac # If the value was cached, stop now. 
We just wanted to have am__tar # and am__untar set. test -n "${am_cv_prog_tar_pax}" && break # tar/untar a dummy directory, and stop if the command works. rm -rf conftest.dir mkdir conftest.dir echo GrepMe > conftest.dir/file { echo "$as_me:$LINENO: tardir=conftest.dir && eval $am__tar_ >conftest.tar" >&5 (tardir=conftest.dir && eval $am__tar_ >conftest.tar) >&5 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } rm -rf conftest.dir if test -s conftest.tar; then { echo "$as_me:$LINENO: $am__untar &5 ($am__untar &5 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } { echo "$as_me:$LINENO: cat conftest.dir/file" >&5 (cat conftest.dir/file) >&5 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } grep GrepMe conftest.dir/file >/dev/null 2>&1 && break fi done rm -rf conftest.dir if ${am_cv_prog_tar_pax+:} false; then : $as_echo_n "(cached) " >&6 else am_cv_prog_tar_pax=$_am_tool fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $am_cv_prog_tar_pax" >&5 $as_echo "$am_cv_prog_tar_pax" >&6; } DIRT_TIMESTAMP=1607703417 DIRT_BRANCH_ID=0 # Check whether --with-perlmoduledir was given. if test "${with_perlmoduledir+set}" = set; then : withval=$with_perlmoduledir; if test x$withval = "xno" -o x$withval = "xyes" ; then as_fn_error $? "--with-perlmoduledir requires an argument" "$LINENO" 5 fi perlmoduledir=$withval else perlmoduledir='${libdir}/perl' fi for ac_prog in a2x a2x.py do # Extract the first word of "$ac_prog", so it can be a program name with args. set dummy $ac_prog; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_path_A2X+:} false; then : $as_echo_n "(cached) " >&6 else case $A2X in [\\/]* | ?:[\\/]*) ac_cv_path_A2X="$A2X" # Let the user override the test with a path. ;; *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_path_A2X="$as_dir/$ac_word$ac_exec_ext" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS ;; esac fi A2X=$ac_cv_path_A2X if test -n "$A2X"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $A2X" >&5 $as_echo "$A2X" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi test -n "$A2X" && break done if test "x$A2X" != x; then BUILD_MANPAGES_TRUE= BUILD_MANPAGES_FALSE='#' else BUILD_MANPAGES_TRUE='#' BUILD_MANPAGES_FALSE= fi ac_config_files="$ac_config_files globus-gram-job-manager-scripts-uninstalled.pc globus-gram-job-manager-scripts.pc Makefile" ac_config_files="$ac_config_files globus-gatekeeper-admin" ac_config_files="$ac_config_files globus-job-manager-script.pl" cat >confcache <<\_ACEOF # This file is a shell script that caches the results of configure # tests run on this system so they can be shared between configure # scripts and configure runs, see configure's option --config-cache. # It is not useful on other systems. If it contains results you don't # want to keep, you may remove or edit it. # # config.status only pays attention to the cache file if you give it # the --recheck option to rerun configure. # # `ac_cv_env_foo' variables (set or unset) will be overridden when # loading this file, other *unset* `ac_cv_foo' will be assigned the # following values. 
_ACEOF # The following way of writing the cache mishandles newlines in values, # but we know of no workaround that is simple, portable, and efficient. # So, we kill variables containing newlines. # Ultrix sh set writes to stderr and can't be redirected directly, # and sets the high bit in the cache file unless we assign to the vars. ( for ac_var in `(set) 2>&1 | sed -n 's/^\([a-zA-Z_][a-zA-Z0-9_]*\)=.*/\1/p'`; do eval ac_val=\$$ac_var case $ac_val in #( *${as_nl}*) case $ac_var in #( *_cv_*) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: cache variable $ac_var contains a newline" >&5 $as_echo "$as_me: WARNING: cache variable $ac_var contains a newline" >&2;} ;; esac case $ac_var in #( _ | IFS | as_nl) ;; #( BASH_ARGV | BASH_SOURCE) eval $ac_var= ;; #( *) { eval $ac_var=; unset $ac_var;} ;; esac ;; esac done (set) 2>&1 | case $as_nl`(ac_space=' '; set) 2>&1` in #( *${as_nl}ac_space=\ *) # `set' does not quote correctly, so add quotes: double-quote # substitution turns \\\\ into \\, and sed turns \\ into \. sed -n \ "s/'/'\\\\''/g; s/^\\([_$as_cr_alnum]*_cv_[_$as_cr_alnum]*\\)=\\(.*\\)/\\1='\\2'/p" ;; #( *) # `set' quotes correctly as required by POSIX, so do not add quotes. sed -n "/^[_$as_cr_alnum]*_cv_[_$as_cr_alnum]*=/p" ;; esac | sort ) | sed ' /^ac_cv_env_/b end t clear :clear s/^\([^=]*\)=\(.*[{}].*\)$/test "${\1+set}" = set || &/ t end s/^\([^=]*\)=\(.*\)$/\1=${\1=\2}/ :end' >>confcache if diff "$cache_file" confcache >/dev/null 2>&1; then :; else if test -w "$cache_file"; then if test "x$cache_file" != "x/dev/null"; then { $as_echo "$as_me:${as_lineno-$LINENO}: updating cache $cache_file" >&5 $as_echo "$as_me: updating cache $cache_file" >&6;} if test ! -f "$cache_file" || test -h "$cache_file"; then cat confcache >"$cache_file" else case $cache_file in #( */* | ?:*) mv -f confcache "$cache_file"$$ && mv -f "$cache_file"$$ "$cache_file" ;; #( *) mv -f confcache "$cache_file" ;; esac fi fi else { $as_echo "$as_me:${as_lineno-$LINENO}: not updating unwritable cache $cache_file" >&5 $as_echo "$as_me: not updating unwritable cache $cache_file" >&6;} fi fi rm -f confcache test "x$prefix" = xNONE && prefix=$ac_default_prefix # Let make expand exec_prefix. test "x$exec_prefix" = xNONE && exec_prefix='${prefix}' # Transform confdefs.h into DEFS. # Protect against shell expansion while executing Makefile rules. # Protect against Makefile macro expansion. # # If the first sed substitution is executed (which looks for macros that # take arguments), then branch to the quote section. Otherwise, # look for a macro that doesn't take arguments. ac_script=' :mline /\\$/{ N s,\\\n,, b mline } t clear :clear s/^[ ]*#[ ]*define[ ][ ]*\([^ (][^ (]*([^)]*)\)[ ]*\(.*\)/-D\1=\2/g t quote s/^[ ]*#[ ]*define[ ][ ]*\([^ ][^ ]*\)[ ]*\(.*\)/-D\1=\2/g t quote b any :quote s/[ `~#$^&*(){}\\|;'\''"<>?]/\\&/g s/\[/\\&/g s/\]/\\&/g s/\$/$$/g H :any ${ g s/^\n// s/\n/ /g p } ' DEFS=`sed -n "$ac_script" confdefs.h` ac_libobjs= ac_ltlibobjs= U= for ac_i in : $LIBOBJS; do test "x$ac_i" = x: && continue # 1. Remove the extension, and $U if already installed. ac_script='s/\$U\././;s/\.o$//;s/\.obj$//' ac_i=`$as_echo "$ac_i" | sed "$ac_script"` # 2. Prepend LIBOBJDIR. When used with automake>=1.10 LIBOBJDIR # will be set to the directory where LIBOBJS objects are built. 
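# (Descriptive sketch with a hypothetical entry: a value such as
#  LIBOBJS="fnmatch.o"  is rewritten by this loop into
#  ${LIBOBJDIR}fnmatch$U.$ac_objext and ${LIBOBJDIR}fnmatch$U.lo before
#  being handed back to the Makefiles as LIBOBJS and LTLIBOBJS.)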
as_fn_append ac_libobjs " \${LIBOBJDIR}$ac_i\$U.$ac_objext" as_fn_append ac_ltlibobjs " \${LIBOBJDIR}$ac_i"'$U.lo' done LIBOBJS=$ac_libobjs LTLIBOBJS=$ac_ltlibobjs { $as_echo "$as_me:${as_lineno-$LINENO}: checking that generated files are newer than configure" >&5 $as_echo_n "checking that generated files are newer than configure... " >&6; } if test -n "$am_sleep_pid"; then # Hide warnings about reused PIDs. wait $am_sleep_pid 2>/dev/null fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: done" >&5 $as_echo "done" >&6; } if test -z "${BUILD_MANPAGES_TRUE}" && test -z "${BUILD_MANPAGES_FALSE}"; then as_fn_error $? "conditional \"BUILD_MANPAGES\" was never defined. Usually this means the macro was only invoked conditionally." "$LINENO" 5 fi : "${CONFIG_STATUS=./config.status}" ac_write_fail=0 ac_clean_files_save=$ac_clean_files ac_clean_files="$ac_clean_files $CONFIG_STATUS" { $as_echo "$as_me:${as_lineno-$LINENO}: creating $CONFIG_STATUS" >&5 $as_echo "$as_me: creating $CONFIG_STATUS" >&6;} as_write_fail=0 cat >$CONFIG_STATUS <<_ASEOF || as_write_fail=1 #! $SHELL # Generated by $as_me. # Run this file to recreate the current configuration. # Compiler output produced by configure, useful for debugging # configure, is in config.log if it exists. debug=false ac_cs_recheck=false ac_cs_silent=false SHELL=\${CONFIG_SHELL-$SHELL} export SHELL _ASEOF cat >>$CONFIG_STATUS <<\_ASEOF || as_write_fail=1 ## -------------------- ## ## M4sh Initialization. ## ## -------------------- ## # Be more Bourne compatible DUALCASE=1; export DUALCASE # for MKS sh if test -n "${ZSH_VERSION+set}" && (emulate sh) >/dev/null 2>&1; then : emulate sh NULLCMD=: # Pre-4.2 versions of Zsh do word splitting on ${1+"$@"}, which # is contrary to our usage. Disable this feature. alias -g '${1+"$@"}'='"$@"' setopt NO_GLOB_SUBST else case `(set -o) 2>/dev/null` in #( *posix*) : set -o posix ;; #( *) : ;; esac fi as_nl=' ' export as_nl # Printing a long string crashes Solaris 7 /usr/bin/printf. as_echo='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\' as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo$as_echo # Prefer a ksh shell builtin over an external printf program on Solaris, # but without wasting forks for bash or zsh. if test -z "$BASH_VERSION$ZSH_VERSION" \ && (test "X`print -r -- $as_echo`" = "X$as_echo") 2>/dev/null; then as_echo='print -r --' as_echo_n='print -rn --' elif (test "X`printf %s $as_echo`" = "X$as_echo") 2>/dev/null; then as_echo='printf %s\n' as_echo_n='printf %s' else if test "X`(/usr/ucb/echo -n -n $as_echo) 2>/dev/null`" = "X-n $as_echo"; then as_echo_body='eval /usr/ucb/echo -n "$1$as_nl"' as_echo_n='/usr/ucb/echo -n' else as_echo_body='eval expr "X$1" : "X\\(.*\\)"' as_echo_n_body='eval arg=$1; case $arg in #( *"$as_nl"*) expr "X$arg" : "X\\(.*\\)$as_nl"; arg=`expr "X$arg" : ".*$as_nl\\(.*\\)"`;; esac; expr "X$arg" : "X\\(.*\\)" | tr -d "$as_nl" ' export as_echo_n_body as_echo_n='sh -c $as_echo_n_body as_echo' fi export as_echo_body as_echo='sh -c $as_echo_body as_echo' fi # The user is always right. if test "${PATH_SEPARATOR+set}" != set; then PATH_SEPARATOR=: (PATH='/bin;/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 && { (PATH='/bin:/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 || PATH_SEPARATOR=';' } fi # IFS # We need space, tab and new line, in precisely that order. Quoting is # there to prevent editors from complaining about space-tab. 
# (If _AS_PATH_WALK were called with IFS unset, it would disable word # splitting by setting IFS to empty value.) IFS=" "" $as_nl" # Find who we are. Look in the path if we contain no directory separator. as_myself= case $0 in #(( *[\\/]* ) as_myself=$0 ;; *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. test -r "$as_dir/$0" && as_myself=$as_dir/$0 && break done IFS=$as_save_IFS ;; esac # We did not find ourselves, most probably we were run as `sh COMMAND' # in which case we are not to be found in the path. if test "x$as_myself" = x; then as_myself=$0 fi if test ! -f "$as_myself"; then $as_echo "$as_myself: error: cannot find myself; rerun with an absolute file name" >&2 exit 1 fi # Unset variables that we do not need and which cause bugs (e.g. in # pre-3.0 UWIN ksh). But do not cause bugs in bash 2.01; the "|| exit 1" # suppresses any "Segmentation fault" message there. '((' could # trigger a bug in pdksh 5.2.14. for as_var in BASH_ENV ENV MAIL MAILPATH do eval test x\${$as_var+set} = xset \ && ( (unset $as_var) || exit 1) >/dev/null 2>&1 && unset $as_var || : done PS1='$ ' PS2='> ' PS4='+ ' # NLS nuisances. LC_ALL=C export LC_ALL LANGUAGE=C export LANGUAGE # CDPATH. (unset CDPATH) >/dev/null 2>&1 && unset CDPATH # as_fn_error STATUS ERROR [LINENO LOG_FD] # ---------------------------------------- # Output "`basename $0`: error: ERROR" to stderr. If LINENO and LOG_FD are # provided, also output the error to LOG_FD, referencing LINENO. Then exit the # script with STATUS, using 1 if that was 0. as_fn_error () { as_status=$1; test $as_status -eq 0 && as_status=1 if test "$4"; then as_lineno=${as_lineno-"$3"} as_lineno_stack=as_lineno_stack=$as_lineno_stack $as_echo "$as_me:${as_lineno-$LINENO}: error: $2" >&$4 fi $as_echo "$as_me: error: $2" >&2 as_fn_exit $as_status } # as_fn_error # as_fn_set_status STATUS # ----------------------- # Set $? to STATUS, without forking. as_fn_set_status () { return $1 } # as_fn_set_status # as_fn_exit STATUS # ----------------- # Exit the shell with STATUS, even in a "trap 0" or "set -e" context. as_fn_exit () { set +e as_fn_set_status $1 exit $1 } # as_fn_exit # as_fn_unset VAR # --------------- # Portably unset VAR. as_fn_unset () { { eval $1=; unset $1;} } as_unset=as_fn_unset # as_fn_append VAR VALUE # ---------------------- # Append the text in VALUE to the end of the definition contained in VAR. Take # advantage of any shell optimizations that allow amortized linear growth over # repeated appends, instead of the typical quadratic growth present in naive # implementations. if (eval "as_var=1; as_var+=2; test x\$as_var = x12") 2>/dev/null; then : eval 'as_fn_append () { eval $1+=\$2 }' else as_fn_append () { eval $1=\$$1\$2 } fi # as_fn_append # as_fn_arith ARG... # ------------------ # Perform arithmetic evaluation on the ARGs, and store the result in the # global $as_val. Take advantage of shells that can avoid forks. The arguments # must be portable across $(()) and expr. if (eval "test \$(( 1 + 1 )) = 2") 2>/dev/null; then : eval 'as_fn_arith () { as_val=$(( $* )) }' else as_fn_arith () { as_val=`expr "$@" || test $? 
-eq 1` } fi # as_fn_arith if expr a : '\(a\)' >/dev/null 2>&1 && test "X`expr 00001 : '.*\(...\)'`" = X001; then as_expr=expr else as_expr=false fi if (basename -- /) >/dev/null 2>&1 && test "X`basename -- / 2>&1`" = "X/"; then as_basename=basename else as_basename=false fi if (as_dir=`dirname -- /` && test "X$as_dir" = X/) >/dev/null 2>&1; then as_dirname=dirname else as_dirname=false fi as_me=`$as_basename -- "$0" || $as_expr X/"$0" : '.*/\([^/][^/]*\)/*$' \| \ X"$0" : 'X\(//\)$' \| \ X"$0" : 'X\(/\)' \| . 2>/dev/null || $as_echo X/"$0" | sed '/^.*\/\([^/][^/]*\)\/*$/{ s//\1/ q } /^X\/\(\/\/\)$/{ s//\1/ q } /^X\/\(\/\).*/{ s//\1/ q } s/.*/./; q'` # Avoid depending upon Character Ranges. as_cr_letters='abcdefghijklmnopqrstuvwxyz' as_cr_LETTERS='ABCDEFGHIJKLMNOPQRSTUVWXYZ' as_cr_Letters=$as_cr_letters$as_cr_LETTERS as_cr_digits='0123456789' as_cr_alnum=$as_cr_Letters$as_cr_digits ECHO_C= ECHO_N= ECHO_T= case `echo -n x` in #((((( -n*) case `echo 'xy\c'` in *c*) ECHO_T=' ';; # ECHO_T is single tab character. xy) ECHO_C='\c';; *) echo `echo ksh88 bug on AIX 6.1` > /dev/null ECHO_T=' ';; esac;; *) ECHO_N='-n';; esac rm -f conf$$ conf$$.exe conf$$.file if test -d conf$$.dir; then rm -f conf$$.dir/conf$$.file else rm -f conf$$.dir mkdir conf$$.dir 2>/dev/null fi if (echo >conf$$.file) 2>/dev/null; then if ln -s conf$$.file conf$$ 2>/dev/null; then as_ln_s='ln -s' # ... but there are two gotchas: # 1) On MSYS, both `ln -s file dir' and `ln file dir' fail. # 2) DJGPP < 2.04 has no symlinks; `ln -s' creates a wrapper executable. # In both cases, we have to default to `cp -pR'. ln -s conf$$.file conf$$.dir 2>/dev/null && test ! -f conf$$.exe || as_ln_s='cp -pR' elif ln conf$$.file conf$$ 2>/dev/null; then as_ln_s=ln else as_ln_s='cp -pR' fi else as_ln_s='cp -pR' fi rm -f conf$$ conf$$.exe conf$$.dir/conf$$.file conf$$.file rmdir conf$$.dir 2>/dev/null # as_fn_mkdir_p # ------------- # Create "$as_dir" as a directory, including parents if necessary. as_fn_mkdir_p () { case $as_dir in #( -*) as_dir=./$as_dir;; esac test -d "$as_dir" || eval $as_mkdir_p || { as_dirs= while :; do case $as_dir in #( *\'*) as_qdir=`$as_echo "$as_dir" | sed "s/'/'\\\\\\\\''/g"`;; #'( *) as_qdir=$as_dir;; esac as_dirs="'$as_qdir' $as_dirs" as_dir=`$as_dirname -- "$as_dir" || $as_expr X"$as_dir" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ X"$as_dir" : 'X\(//\)[^/]' \| \ X"$as_dir" : 'X\(//\)$' \| \ X"$as_dir" : 'X\(/\)' \| . 2>/dev/null || $as_echo X"$as_dir" | sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ s//\1/ q } /^X\(\/\/\)[^/].*/{ s//\1/ q } /^X\(\/\/\)$/{ s//\1/ q } /^X\(\/\).*/{ s//\1/ q } s/.*/./; q'` test -d "$as_dir" && break done test -z "$as_dirs" || eval "mkdir $as_dirs" } || test -d "$as_dir" || as_fn_error $? "cannot create directory $as_dir" } # as_fn_mkdir_p if mkdir -p . 2>/dev/null; then as_mkdir_p='mkdir -p "$as_dir"' else test -d ./-p && rmdir ./-p as_mkdir_p=false fi # as_fn_executable_p FILE # ----------------------- # Test if FILE is an executable regular file. as_fn_executable_p () { test -f "$1" && test -x "$1" } # as_fn_executable_p as_test_x='test -x' as_executable_p=as_fn_executable_p # Sed expression to map a string onto a valid CPP name. as_tr_cpp="eval sed 'y%*$as_cr_letters%P$as_cr_LETTERS%;s%[^_$as_cr_alnum]%_%g'" # Sed expression to map a string onto a valid variable name. as_tr_sh="eval sed 'y%*+%pp%;s%[^_$as_cr_alnum]%_%g'" exec 6>&1 ## ----------------------------------- ## ## Main body of $CONFIG_STATUS script. 
## ## ----------------------------------- ## _ASEOF test $as_write_fail = 0 && chmod +x $CONFIG_STATUS || ac_write_fail=1 cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 # Save the log message, to keep $0 and so on meaningful, and to # report actual input values of CONFIG_FILES etc. instead of their # values after options handling. ac_log=" This file was extended by globus_gram_job_manager_scripts $as_me 7.3, which was generated by GNU Autoconf 2.69. Invocation command line was CONFIG_FILES = $CONFIG_FILES CONFIG_HEADERS = $CONFIG_HEADERS CONFIG_LINKS = $CONFIG_LINKS CONFIG_COMMANDS = $CONFIG_COMMANDS $ $0 $@ on `(hostname || uname -n) 2>/dev/null | sed 1q` " _ACEOF case $ac_config_files in *" "*) set x $ac_config_files; shift; ac_config_files=$*;; esac cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 # Files that config.status was made for. config_files="$ac_config_files" _ACEOF cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 ac_cs_usage="\ \`$as_me' instantiates files and other configuration actions from templates according to the current configuration. Unless the files and actions are specified as TAGs, all are instantiated by default. Usage: $0 [OPTION]... [TAG]... -h, --help print this help, then exit -V, --version print version number and configuration settings, then exit --config print configuration, then exit -q, --quiet, --silent do not print progress messages -d, --debug don't remove temporary files --recheck update $as_me by reconfiguring in the same conditions --file=FILE[:TEMPLATE] instantiate the configuration file FILE Configuration files: $config_files Report bugs to ." _ACEOF cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 ac_cs_config="`$as_echo "$ac_configure_args" | sed 's/^ //; s/[\\""\`\$]/\\\\&/g'`" ac_cs_version="\\ globus_gram_job_manager_scripts config.status 7.3 configured by $0, generated by GNU Autoconf 2.69, with options \\"\$ac_cs_config\\" Copyright (C) 2012 Free Software Foundation, Inc. This config.status script is free software; the Free Software Foundation gives unlimited permission to copy, distribute and modify it." ac_pwd='$ac_pwd' srcdir='$srcdir' INSTALL='$INSTALL' MKDIR_P='$MKDIR_P' AWK='$AWK' test -n "\$AWK" || AWK=awk _ACEOF cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 # The default lists apply if the user does not specify any file. ac_need_defaults=: while test $# != 0 do case $1 in --*=?*) ac_option=`expr "X$1" : 'X\([^=]*\)='` ac_optarg=`expr "X$1" : 'X[^=]*=\(.*\)'` ac_shift=: ;; --*=) ac_option=`expr "X$1" : 'X\([^=]*\)='` ac_optarg= ac_shift=: ;; *) ac_option=$1 ac_optarg=$2 ac_shift=shift ;; esac case $ac_option in # Handling of the options. -recheck | --recheck | --rechec | --reche | --rech | --rec | --re | --r) ac_cs_recheck=: ;; --version | --versio | --versi | --vers | --ver | --ve | --v | -V ) $as_echo "$ac_cs_version"; exit ;; --config | --confi | --conf | --con | --co | --c ) $as_echo "$ac_cs_config"; exit ;; --debug | --debu | --deb | --de | --d | -d ) debug=: ;; --file | --fil | --fi | --f ) $ac_shift case $ac_optarg in *\'*) ac_optarg=`$as_echo "$ac_optarg" | sed "s/'/'\\\\\\\\''/g"` ;; '') as_fn_error $? "missing file argument" ;; esac as_fn_append CONFIG_FILES " '$ac_optarg'" ac_need_defaults=false;; --he | --h | --help | --hel | -h ) $as_echo "$ac_cs_usage"; exit ;; -q | -quiet | --quiet | --quie | --qui | --qu | --q \ | -silent | --silent | --silen | --sile | --sil | --si | --s) ac_cs_silent=: ;; # This is an error. -*) as_fn_error $? "unrecognized option: \`$1' Try \`$0 --help' for more information." 
;; *) as_fn_append ac_config_targets " $1" ac_need_defaults=false ;; esac shift done ac_configure_extra_args= if $ac_cs_silent; then exec 6>/dev/null ac_configure_extra_args="$ac_configure_extra_args --silent" fi _ACEOF cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 if \$ac_cs_recheck; then set X $SHELL '$0' $ac_configure_args \$ac_configure_extra_args --no-create --no-recursion shift \$as_echo "running CONFIG_SHELL=$SHELL \$*" >&6 CONFIG_SHELL='$SHELL' export CONFIG_SHELL exec "\$@" fi _ACEOF cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 exec 5>>config.log { echo sed 'h;s/./-/g;s/^.../## /;s/...$/ ##/;p;x;p;x' <<_ASBOX ## Running $as_me. ## _ASBOX $as_echo "$ac_log" } >&5 _ACEOF cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 _ACEOF cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 # Handling of arguments. for ac_config_target in $ac_config_targets do case $ac_config_target in "globus-gram-job-manager-scripts-uninstalled.pc") CONFIG_FILES="$CONFIG_FILES globus-gram-job-manager-scripts-uninstalled.pc" ;; "globus-gram-job-manager-scripts.pc") CONFIG_FILES="$CONFIG_FILES globus-gram-job-manager-scripts.pc" ;; "Makefile") CONFIG_FILES="$CONFIG_FILES Makefile" ;; "globus-gatekeeper-admin") CONFIG_FILES="$CONFIG_FILES globus-gatekeeper-admin" ;; "globus-job-manager-script.pl") CONFIG_FILES="$CONFIG_FILES globus-job-manager-script.pl" ;; *) as_fn_error $? "invalid argument: \`$ac_config_target'" "$LINENO" 5;; esac done # If the user did not use the arguments to specify the items to instantiate, # then the envvar interface is used. Set only those that are not. # We use the long form for the default assignment because of an extremely # bizarre bug on SunOS 4.1.3. if $ac_need_defaults; then test "${CONFIG_FILES+set}" = set || CONFIG_FILES=$config_files fi # Have a temporary directory for convenience. Make it in the build tree # simply because there is no reason against having it here, and in addition, # creating and moving files from /tmp can sometimes cause problems. # Hook for its removal unless debugging. # Note that there is a small window in which the directory will not be cleaned: # after its creation but before its name has been assigned to `$tmp'. $debug || { tmp= ac_tmp= trap 'exit_status=$? : "${ac_tmp:=$tmp}" { test ! -d "$ac_tmp" || rm -fr "$ac_tmp"; } && exit $exit_status ' 0 trap 'as_fn_exit 1' 1 2 13 15 } # Create a (secure) tmp directory for tmp files. { tmp=`(umask 077 && mktemp -d "./confXXXXXX") 2>/dev/null` && test -d "$tmp" } || { tmp=./conf$$-$RANDOM (umask 077 && mkdir "$tmp") } || as_fn_error $? "cannot create a temporary directory in ." "$LINENO" 5 ac_tmp=$tmp # Set up the scripts for CONFIG_FILES section. # No need to generate them if there are no CONFIG_FILES. # This happens for instance with `./config.status config.h'. if test -n "$CONFIG_FILES"; then ac_cr=`echo X | tr X '\015'` # On cygwin, bash can eat \r inside `` if the user requested igncr. # But we know of no other shell where ac_cr would be empty at this # point, so we can use a bashism as a fallback. if test "x$ac_cr" = x; then eval ac_cr=\$\'\\r\' fi ac_cs_awk_cr=`$AWK 'BEGIN { print "a\rb" }' /dev/null` if test "$ac_cs_awk_cr" = "a${ac_cr}b"; then ac_cs_awk_cr='\\r' else ac_cs_awk_cr=$ac_cr fi echo 'BEGIN {' >"$ac_tmp/subs1.awk" && _ACEOF { echo "cat >conf$$subs.awk <<_ACEOF" && echo "$ac_subst_vars" | sed 's/.*/&!$&$ac_delim/' && echo "_ACEOF" } >conf$$subs.sh || as_fn_error $? 
"could not make $CONFIG_STATUS" "$LINENO" 5 ac_delim_num=`echo "$ac_subst_vars" | grep -c '^'` ac_delim='%!_!# ' for ac_last_try in false false false false false :; do . ./conf$$subs.sh || as_fn_error $? "could not make $CONFIG_STATUS" "$LINENO" 5 ac_delim_n=`sed -n "s/.*$ac_delim\$/X/p" conf$$subs.awk | grep -c X` if test $ac_delim_n = $ac_delim_num; then break elif $ac_last_try; then as_fn_error $? "could not make $CONFIG_STATUS" "$LINENO" 5 else ac_delim="$ac_delim!$ac_delim _$ac_delim!! " fi done rm -f conf$$subs.sh cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 cat >>"\$ac_tmp/subs1.awk" <<\\_ACAWK && _ACEOF sed -n ' h s/^/S["/; s/!.*/"]=/ p g s/^[^!]*!// :repl t repl s/'"$ac_delim"'$// t delim :nl h s/\(.\{148\}\)..*/\1/ t more1 s/["\\]/\\&/g; s/^/"/; s/$/\\n"\\/ p n b repl :more1 s/["\\]/\\&/g; s/^/"/; s/$/"\\/ p g s/.\{148\}// t nl :delim h s/\(.\{148\}\)..*/\1/ t more2 s/["\\]/\\&/g; s/^/"/; s/$/"/ p b :more2 s/["\\]/\\&/g; s/^/"/; s/$/"\\/ p g s/.\{148\}// t delim ' >$CONFIG_STATUS || ac_write_fail=1 rm -f conf$$subs.awk cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 _ACAWK cat >>"\$ac_tmp/subs1.awk" <<_ACAWK && for (key in S) S_is_set[key] = 1 FS = "" } { line = $ 0 nfields = split(line, field, "@") substed = 0 len = length(field[1]) for (i = 2; i < nfields; i++) { key = field[i] keylen = length(key) if (S_is_set[key]) { value = S[key] line = substr(line, 1, len) "" value "" substr(line, len + keylen + 3) len += length(value) + length(field[++i]) substed = 1 } else len += 1 + keylen } print line } _ACAWK _ACEOF cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 if sed "s/$ac_cr//" < /dev/null > /dev/null 2>&1; then sed "s/$ac_cr\$//; s/$ac_cr/$ac_cs_awk_cr/g" else cat fi < "$ac_tmp/subs1.awk" > "$ac_tmp/subs.awk" \ || as_fn_error $? "could not setup config files machinery" "$LINENO" 5 _ACEOF # VPATH may cause trouble with some makes, so we remove sole $(srcdir), # ${srcdir} and @srcdir@ entries from VPATH if srcdir is ".", strip leading and # trailing colons and then remove the whole line if VPATH becomes empty # (actually we leave an empty line to preserve line numbers). if test "x$srcdir" = x.; then ac_vpsub='/^[ ]*VPATH[ ]*=[ ]*/{ h s/// s/^/:/ s/[ ]*$/:/ s/:\$(srcdir):/:/g s/:\${srcdir}:/:/g s/:@srcdir@:/:/g s/^:*// s/:*$// x s/\(=[ ]*\).*/\1/ G s/\n// s/^[^=]*=[ ]*$// }' fi cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 fi # test -n "$CONFIG_FILES" eval set X " :F $CONFIG_FILES " shift for ac_tag do case $ac_tag in :[FHLC]) ac_mode=$ac_tag; continue;; esac case $ac_mode$ac_tag in :[FHL]*:*);; :L* | :C*:*) as_fn_error $? "invalid tag \`$ac_tag'" "$LINENO" 5;; :[FH]-) ac_tag=-:-;; :[FH]*) ac_tag=$ac_tag:$ac_tag.in;; esac ac_save_IFS=$IFS IFS=: set x $ac_tag IFS=$ac_save_IFS shift ac_file=$1 shift case $ac_mode in :L) ac_source=$1;; :[FH]) ac_file_inputs= for ac_f do case $ac_f in -) ac_f="$ac_tmp/stdin";; *) # Look for the file first in the build tree, then in the source tree # (if the path is not absolute). The absolute path cannot be DOS-style, # because $ac_f cannot contain `:'. test -f "$ac_f" || case $ac_f in [\\/$]*) false;; *) test -f "$srcdir/$ac_f" && ac_f="$srcdir/$ac_f";; esac || as_fn_error 1 "cannot find input file: \`$ac_f'" "$LINENO" 5;; esac case $ac_f in *\'*) ac_f=`$as_echo "$ac_f" | sed "s/'/'\\\\\\\\''/g"`;; esac as_fn_append ac_file_inputs " '$ac_f'" done # Let's still pretend it is `configure' which instantiates (i.e., don't # use $as_me), people would be surprised to read: # /* config.h. Generated by config.status. 
*/ configure_input='Generated from '` $as_echo "$*" | sed 's|^[^:]*/||;s|:[^:]*/|, |g' `' by configure.' if test x"$ac_file" != x-; then configure_input="$ac_file. $configure_input" { $as_echo "$as_me:${as_lineno-$LINENO}: creating $ac_file" >&5 $as_echo "$as_me: creating $ac_file" >&6;} fi # Neutralize special characters interpreted by sed in replacement strings. case $configure_input in #( *\&* | *\|* | *\\* ) ac_sed_conf_input=`$as_echo "$configure_input" | sed 's/[\\\\&|]/\\\\&/g'`;; #( *) ac_sed_conf_input=$configure_input;; esac case $ac_tag in *:-:* | *:-) cat >"$ac_tmp/stdin" \ || as_fn_error $? "could not create $ac_file" "$LINENO" 5 ;; esac ;; esac ac_dir=`$as_dirname -- "$ac_file" || $as_expr X"$ac_file" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ X"$ac_file" : 'X\(//\)[^/]' \| \ X"$ac_file" : 'X\(//\)$' \| \ X"$ac_file" : 'X\(/\)' \| . 2>/dev/null || $as_echo X"$ac_file" | sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ s//\1/ q } /^X\(\/\/\)[^/].*/{ s//\1/ q } /^X\(\/\/\)$/{ s//\1/ q } /^X\(\/\).*/{ s//\1/ q } s/.*/./; q'` as_dir="$ac_dir"; as_fn_mkdir_p ac_builddir=. case "$ac_dir" in .) ac_dir_suffix= ac_top_builddir_sub=. ac_top_build_prefix= ;; *) ac_dir_suffix=/`$as_echo "$ac_dir" | sed 's|^\.[\\/]||'` # A ".." for each directory in $ac_dir_suffix. ac_top_builddir_sub=`$as_echo "$ac_dir_suffix" | sed 's|/[^\\/]*|/..|g;s|/||'` case $ac_top_builddir_sub in "") ac_top_builddir_sub=. ac_top_build_prefix= ;; *) ac_top_build_prefix=$ac_top_builddir_sub/ ;; esac ;; esac ac_abs_top_builddir=$ac_pwd ac_abs_builddir=$ac_pwd$ac_dir_suffix # for backward compatibility: ac_top_builddir=$ac_top_build_prefix case $srcdir in .) # We are building in place. ac_srcdir=. ac_top_srcdir=$ac_top_builddir_sub ac_abs_top_srcdir=$ac_pwd ;; [\\/]* | ?:[\\/]* ) # Absolute name. ac_srcdir=$srcdir$ac_dir_suffix; ac_top_srcdir=$srcdir ac_abs_top_srcdir=$srcdir ;; *) # Relative name. ac_srcdir=$ac_top_build_prefix$srcdir$ac_dir_suffix ac_top_srcdir=$ac_top_build_prefix$srcdir ac_abs_top_srcdir=$ac_pwd/$srcdir ;; esac ac_abs_srcdir=$ac_abs_top_srcdir$ac_dir_suffix case $ac_mode in :F) # # CONFIG_FILE # case $INSTALL in [\\/$]* | ?:[\\/]* ) ac_INSTALL=$INSTALL ;; *) ac_INSTALL=$ac_top_build_prefix$INSTALL ;; esac ac_MKDIR_P=$MKDIR_P case $MKDIR_P in [\\/$]* | ?:[\\/]* ) ;; */*) ac_MKDIR_P=$ac_top_build_prefix$MKDIR_P ;; esac _ACEOF cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 # If the template does not know about datarootdir, expand it. # FIXME: This hack should be removed a few years after 2.60. ac_datarootdir_hack=; ac_datarootdir_seen= ac_sed_dataroot=' /datarootdir/ { p q } /@datadir@/p /@docdir@/p /@infodir@/p /@localedir@/p /@mandir@/p' case `eval "sed -n \"\$ac_sed_dataroot\" $ac_file_inputs"` in *datarootdir*) ac_datarootdir_seen=yes;; *@datadir@*|*@docdir@*|*@infodir@*|*@localedir@*|*@mandir@*) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $ac_file_inputs seems to ignore the --datarootdir setting" >&5 $as_echo "$as_me: WARNING: $ac_file_inputs seems to ignore the --datarootdir setting" >&2;} _ACEOF cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 ac_datarootdir_hack=' s&@datadir@&$datadir&g s&@docdir@&$docdir&g s&@infodir@&$infodir&g s&@localedir@&$localedir&g s&@mandir@&$mandir&g s&\\\${datarootdir}&$datarootdir&g' ;; esac _ACEOF # Neutralize VPATH when `$srcdir' = `.'. # Shell code in configure.ac might set extrasub. # FIXME: do we really want to maintain this feature? 
cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 ac_sed_extra="$ac_vpsub $extrasub _ACEOF cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 :t /@[a-zA-Z_][a-zA-Z_0-9]*@/!b s|@configure_input@|$ac_sed_conf_input|;t t s&@top_builddir@&$ac_top_builddir_sub&;t t s&@top_build_prefix@&$ac_top_build_prefix&;t t s&@srcdir@&$ac_srcdir&;t t s&@abs_srcdir@&$ac_abs_srcdir&;t t s&@top_srcdir@&$ac_top_srcdir&;t t s&@abs_top_srcdir@&$ac_abs_top_srcdir&;t t s&@builddir@&$ac_builddir&;t t s&@abs_builddir@&$ac_abs_builddir&;t t s&@abs_top_builddir@&$ac_abs_top_builddir&;t t s&@INSTALL@&$ac_INSTALL&;t t s&@MKDIR_P@&$ac_MKDIR_P&;t t $ac_datarootdir_hack " eval sed \"\$ac_sed_extra\" "$ac_file_inputs" | $AWK -f "$ac_tmp/subs.awk" \ >$ac_tmp/out || as_fn_error $? "could not create $ac_file" "$LINENO" 5 test -z "$ac_datarootdir_hack$ac_datarootdir_seen" && { ac_out=`sed -n '/\${datarootdir}/p' "$ac_tmp/out"`; test -n "$ac_out"; } && { ac_out=`sed -n '/^[ ]*datarootdir[ ]*:*=/p' \ "$ac_tmp/out"`; test -z "$ac_out"; } && { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $ac_file contains a reference to the variable \`datarootdir' which seems to be undefined. Please make sure it is defined" >&5 $as_echo "$as_me: WARNING: $ac_file contains a reference to the variable \`datarootdir' which seems to be undefined. Please make sure it is defined" >&2;} rm -f "$ac_tmp/stdin" case $ac_file in -) cat "$ac_tmp/out" && rm -f "$ac_tmp/out";; *) rm -f "$ac_file" && mv "$ac_tmp/out" "$ac_file";; esac \ || as_fn_error $? "could not create $ac_file" "$LINENO" 5 ;; esac case $ac_file$ac_mode in "globus-gatekeeper-admin":F) chmod a+x globus-gatekeeper-admin ;; "globus-job-manager-script.pl":F) chmod a+x globus-job-manager-script.pl ;; esac done # for ac_tag as_fn_exit 0 _ACEOF ac_clean_files=$ac_clean_files_save test $ac_write_fail = 0 || as_fn_error $? "write failure creating $CONFIG_STATUS" "$LINENO" 5 # configure is writing to config.log, and then calls config.status. # config.status does its own redirection, appending to config.log. # Unfortunately, on DOS this fails, as config.log is still kept open # by configure, so config.status won't be able to write to it; its # output is simply discarded. So we exec the FD to /dev/null, # effectively closing config.log, so it can be properly (re)opened and # appended to by config.status. When coming back to configure, we # need to make the FD available again. if test "$no_create" != yes; then ac_cs_success=: ac_config_status_args= test "$silent" = yes && ac_config_status_args="$ac_config_status_args --quiet" exec 5>/dev/null $SHELL $CONFIG_STATUS $ac_config_status_args || ac_cs_success=false exec 5>>config.log # Use ||, not &&, to avoid exiting from the if with $? = 1, which # would make configure fail if this is the last instruction. 
$ac_cs_success || as_fn_exit 1 fi if test -n "$ac_unrecognized_opts" && test "$enable_option_checking" != no; then { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: unrecognized options: $ac_unrecognized_opts" >&5 $as_echo "$as_me: WARNING: unrecognized options: $ac_unrecognized_opts" >&2;} fi globus_gram_job_manager_scripts-7.3/PaxHeaders.20921/JobDescription.3pm0000644000000000000000000000013213765230140024311 xustar000000000000000030 mtime=1607807072.896897731 30 atime=1607807072.856897731 30 ctime=1607807072.924897731 globus_gram_job_manager_scripts-7.3/JobDescription.3pm0000644000372000037200000002246513765230140024230 0ustar00travistravis00000000000000.\" Automatically generated by Pod::Man 2.27 (Pod::Simple 3.28) .\" .\" Standard preamble: .\" ======================================================================== .de Sp \" Vertical space (when we can't use .PP) .if t .sp .5v .if n .sp .. .de Vb \" Begin verbatim text .ft CW .nf .ne \\$1 .. .de Ve \" End verbatim text .ft R .fi .. .\" Set up some character translations and predefined strings. \*(-- will .\" give an unbreakable dash, \*(PI will give pi, \*(L" will give a left .\" double quote, and \*(R" will give a right double quote. \*(C+ will .\" give a nicer C++. Capital omega is used to do unbreakable dashes and .\" therefore won't be available. \*(C` and \*(C' expand to `' in nroff, .\" nothing in troff, for use with C<>. .tr \(*W- .ds C+ C\v'-.1v'\h'-1p'\s-2+\h'-1p'+\s0\v'.1v'\h'-1p' .ie n \{\ . ds -- \(*W- . ds PI pi . if (\n(.H=4u)&(1m=24u) .ds -- \(*W\h'-12u'\(*W\h'-12u'-\" diablo 10 pitch . if (\n(.H=4u)&(1m=20u) .ds -- \(*W\h'-12u'\(*W\h'-8u'-\" diablo 12 pitch . ds L" "" . ds R" "" . ds C` "" . ds C' "" 'br\} .el\{\ . ds -- \|\(em\| . ds PI \(*p . ds L" `` . ds R" '' . ds C` . ds C' 'br\} .\" .\" Escape single quotes in literal strings from groff's Unicode transform. .ie \n(.g .ds Aq \(aq .el .ds Aq ' .\" .\" If the F register is turned on, we'll generate index entries on stderr for .\" titles (.TH), headers (.SH), subsections (.SS), items (.Ip), and index .\" entries marked with X<> in POD. Of course, you'll have to process the .\" output yourself in some meaningful fashion. .\" .\" Avoid warning from groff about undefined register 'F'. .de IX .. .nr rF 0 .if \n(.g .if rF .nr rF 1 .if (\n(rF:(\n(.g==0)) \{ . if \nF \{ . de IX . tm Index:\\$1\t\\n%\t"\\$2" .. . if !\nF==2 \{ . nr % 0 . nr F 2 . \} . \} .\} .rr rF .\" .\" Accent mark definitions (@(#)ms.acc 1.5 88/02/08 SMI; from UCB 4.2). .\" Fear. Run. Save yourself. No user-serviceable parts. . \" fudge factors for nroff and troff .if n \{\ . ds #H 0 . ds #V .8m . ds #F .3m . ds #[ \f1 . ds #] \fP .\} .if t \{\ . ds #H ((1u-(\\\\n(.fu%2u))*.13m) . ds #V .6m . ds #F 0 . ds #[ \& . ds #] \& .\} . \" simple accents for nroff and troff .if n \{\ . ds ' \& . ds ` \& . ds ^ \& . ds , \& . ds ~ ~ . ds / .\} .if t \{\ . ds ' \\k:\h'-(\\n(.wu*8/10-\*(#H)'\'\h"|\\n:u" . ds ` \\k:\h'-(\\n(.wu*8/10-\*(#H)'\`\h'|\\n:u' . ds ^ \\k:\h'-(\\n(.wu*10/11-\*(#H)'^\h'|\\n:u' . ds , \\k:\h'-(\\n(.wu*8/10)',\h'|\\n:u' . ds ~ \\k:\h'-(\\n(.wu-\*(#H-.1m)'~\h'|\\n:u' . ds / \\k:\h'-(\\n(.wu*8/10-\*(#H)'\z\(sl\h'|\\n:u' .\} . 
\" troff and (daisy-wheel) nroff accents .ds : \\k:\h'-(\\n(.wu*8/10-\*(#H+.1m+\*(#F)'\v'-\*(#V'\z.\h'.2m+\*(#F'.\h'|\\n:u'\v'\*(#V' .ds 8 \h'\*(#H'\(*b\h'-\*(#H' .ds o \\k:\h'-(\\n(.wu+\w'\(de'u-\*(#H)/2u'\v'-.3n'\*(#[\z\(de\v'.3n'\h'|\\n:u'\*(#] .ds d- \h'\*(#H'\(pd\h'-\w'~'u'\v'-.25m'\f2\(hy\fP\v'.25m'\h'-\*(#H' .ds D- D\\k:\h'-\w'D'u'\v'-.11m'\z\(hy\v'.11m'\h'|\\n:u' .ds th \*(#[\v'.3m'\s+1I\s-1\v'-.3m'\h'-(\w'I'u*2/3)'\s-1o\s+1\*(#] .ds Th \*(#[\s+2I\s-2\h'-\w'I'u*3/5'\v'-.3m'o\v'.3m'\*(#] .ds ae a\h'-(\w'a'u*4/10)'e .ds Ae A\h'-(\w'A'u*4/10)'E . \" corrections for vroff .if v .ds ~ \\k:\h'-(\\n(.wu*9/10-\*(#H)'\s-2\u~\d\s+2\h'|\\n:u' .if v .ds ^ \\k:\h'-(\\n(.wu*10/11-\*(#H)'\v'-.4m'^\v'.4m'\h'|\\n:u' . \" for low resolution devices (crt and lpr) .if \n(.H>23 .if \n(.V>19 \ \{\ . ds : e . ds 8 ss . ds o a . ds d- d\h'-1'\(ga . ds D- D\h'-1'\(hy . ds th \o'bp' . ds Th \o'LP' . ds ae ae . ds Ae AE .\} .rm #[ #] #H #V #F C .\" ======================================================================== .\" .IX Title "JobDescription 3pm" .TH JobDescription 3pm "2020-12-12" "perl v5.16.3" "User Contributed Perl Documentation" .\" For nroff, turn off justification. Always turn off hyphenation; it makes .\" way too many mistakes in technical documents. .if n .ad l .nh .SH "NAME" Globus::GRAM::JobDescription \- GRAM Job Description Globus::GRAM::DefaultHandlingJobDescription \- GRAM Job Description with relative path handling .SH "SYNOPSIS" .IX Header "SYNOPSIS" .Vb 1 \& use Globus::GRAM::JobDescription; \& \& $hash = { executable => [ \*(Aq/bin/echo\*(Aq ], arguments => [ \*(Aqhello\*(Aq ] }; \& $description = new Globus::GRAM::JobDescription($filename); \& $description = new Globus::GRAM::JobDescription($hash); \& $executable = $description\->executable(); \& $description\->add($new_attribute, $new_value); \& $description\->save(); \& $description\->save($filename); \& $description\->print_recursive($file_handle); .Ve .SH "DESCRIPTION" .IX Header "DESCRIPTION" This object contains the parameters of a job request in a simple object wrapper. The object may be queried to determine the value of any \s-1RSL\s0 parameter, may be updated with new parameters, and may be saved in the filesystem for later use. .SS "Methods" .IX Subsection "Methods" .ie n .IP "new Globus::GRAM::JobDescription(\fI\fI$filename\fI\fR)" 4 .el .IP "new Globus::GRAM::JobDescription(\fI\f(CI$filename\fI\fR)" 4 .IX Item "new Globus::GRAM::JobDescription($filename)" A JobDescription is constructed from a file consisting of a Perl hash of parameter => array mappings. Every value in the Job Description is stored internally as an array, even single literals, similar to the way an \s-1RSL\s0 tree is parsed in C. An example of such a file is .Sp .Vb 11 \& $description = \& { \& executable => [ \*(Aq/bin/echo\*(Aq ], \& arguments => [ \*(Aqhello\*(Aq, \*(Aqworld\*(Aq ], \& environment => [ \& [ \& \*(AqGLOBUS_GRAM_JOB_CONTACT\*(Aq, \& \*(Aqhttps://globus.org:1234/2345/4332\*(Aq \& ] \& ] \& }; .Ve .Sp which corresponds to the rsl fragment .Sp .Vb 5 \& &(executable = /bin/echo) \& (arguments = hello world) \& (environment = \& (GLOBUS_GRAM_JOB_CONTACT \*(Aqhttps://globus.org:1234/2345/4332\*(Aq) \& ) .Ve .Sp When the library_path \s-1RSL\s0 attribute is specified, this object modifies the environment \s-1RSL\s0 attribute value to append its value to any system specific variables. 
.ie n .IP "$description\->\fIadd\fR('name', \fI\fI$value\fI\fR);" 4 .el .IP "\f(CW$description\fR\->\fIadd\fR('name', \fI\f(CI$value\fI\fR);" 4 .IX Item "$description->add('name', $value);" Add a parameter to a job description. The parameter will be normalized internally so that the access methods described below will work with this new parameter. As an example, .Sp .Vb 1 \& $description\->add(\*(Aqnew_attribute\*(Aq, $new_value) .Ve .Sp will create a new attribute in the JobDescription, which can be accessed by calling the \fI\f(CI$description\fI\-\fRnew_attribute>() method. .ie n .IP "\fI\fI$value\fI\fR $description\->\fIget\fR('name');" 4 .el .IP "\fI\f(CI$value\fI\fR \f(CW$description\fR\->\fIget\fR('name');" 4 .IX Item "$value $description->get('name');" Get a parameter from a job description. As an example, .Sp .Vb 1 \& $description\->get(\*(Aqattribute\*(Aq) .Ve .Sp will return the appropriate attribute in the JobDescription by name. .ie n .IP "$description\->\fIsave\fR([$filename])" 4 .el .IP "\f(CW$description\fR\->\fIsave\fR([$filename])" 4 .IX Item "$description->save([$filename])" Save the JobDescription, including any added parameters, to the file named by \f(CW$filename\fR if present, or replacing the file used in constructing the object. .ie n .IP "$description\->\fIprint_recursive\fR($file_handle)" 4 .el .IP "\f(CW$description\fR\->\fIprint_recursive\fR($file_handle)" 4 .IX Item "$description->print_recursive($file_handle)" Write the value of the job description object to the file handle specified in the argument list. .ie n .IP "$description\->\fIparameter\fR()" 4 .el .IP "\f(CW$description\fR\->\fIparameter\fR()" 4 .IX Item "$description->parameter()" For any parameter defined in the JobDescription can be accessed by calling the method named by the parameter. The method names are automatically created when the JobDescription is created, and may be invoked with arbitrary SillyCaps or underscores. That is, the parameter gram_myjob may be accessed by the GramMyJob, grammyjob, or gram_my_job method names (and others). .Sp If the attributes does not in this object, then undef will be returned. .Sp In a list context, this returns the list of values associated with an attribute. .Sp In a scalar context, if the attribute's value consist of a single literal, then that literal will be returned, otherwise undef will be returned. 
.Sp For example, from a JobDescription called \f(CW$d\fR constructed from a description file containing .Sp .Vb 4 \& { \& executable => [ \*(Aq/bin/echo\*(Aq ], \& arguments => [ \*(Aqhello\*(Aq, \*(Aqworld\*(Aq ] \& } .Ve .Sp The following will hold: .Sp .Vb 6 \& $executable = $d\->executable() # \*(Aq/bin/echo\*(Aq \& $arguments = $d\->arguments() # undef \& @executable = $d\->executable() # (\*(Aq/bin/echo\*(Aq) \& @arguments = $d\->arguments() # (\*(Aqhello\*(Aq, \*(Aqworld\*(Aq) \& $not_present = $d\->not_present() # undef \& @not_present = $d\->not_present() # () .Ve .Sp To test for existence of a value: .Sp .Vb 2 \& @not_present = $d\->not_present() \& print "Not defined\en" if(!defined($not_present[0])); .Ve globus_gram_job_manager_scripts-7.3/PaxHeaders.20921/JobManager.pm0000644000000000000000000000013213765226605023330 xustar000000000000000030 mtime=1607806341.384897731 30 atime=1607807072.748897731 30 ctime=1607807072.920897731 globus_gram_job_manager_scripts-7.3/JobManager.pm0000664000372000037200000011046113765226605023243 0ustar00travistravis00000000000000# # Globus::GRAM::JobManager # # CVS Information: # $Source$ # $Date$ # $Revision$ # $Author$ # use Globus::GRAM::Error; use Globus::GRAM::JobState; use Globus::GRAM::JobSignal; use Globus::Core::Paths; use Globus::GRAM::JobDescription; use POSIX; use Errno; use File::Path; use File::Copy; use strict; package Globus::GRAM::JobManager; local %ENV; $ENV{PATH} = "$ENV{PATH}:${Globus::Core::Paths::bindir}"; my $cache_pgm = "globus-gass-cache-util"; my $url_copy_pgm = "globus-url-copy"; my $info_pgm = "grid-proxy-info"; =head1 NAME Globus::GRAM::JobManager - Base class for all Job Manager scripts =head1 SYNOPSIS $manager = new Globus::GRAM::JobManager($job_description); $manager->log("Starting new operation"); $manager->nfssync($fileobj,$createflag); $manager->respond($hashref); $hashref = $manager->submit(); $hashref = $manager->poll(); $hashref = $manager->cancel(); $hashref = $manager->signal(); $hashref = $manager->make_scratchdir(); $hashref = $manager->remove_scratchdir(); $hashref = $manager->rewrite_urls(); $hashref = $manager->stage_in(); $hashref = $manager->stage_out(); $hashref = $manager->cache_cleanup(); $hashref = $manager->remote_io_file_create(); $hashref = $manager->proxy_relocate(); $hashref = $manager->proxy_update(); $scalar = $manager->pipe_out_cmd(@arglist); ($stderr, $rc) = $manager->pipe_err_cmd(@arglist); $status = $manager->fork_and_exec_cmd(@arglist); $manager->append_path($hash, $variable, $path); $scalar = $manager->setup_softenv(); =head1 DESCRIPTION The Globus::GRAM::JobManager module implements the base behavior for a Job Manager script interface. Scheduler-specific job manager scripts must inherit from this module in order to be used by the job manager. =head2 Methods =over 4 =item $manager = Globus::GRAM::JobManager->new($JobDescription) Each Globus::GRAM::JobManager object is created by calling the constructor with a single argument, a Globus::GRAM::JobDescription object containing the information about the job request which the script will be modifying. 
Modules which subclass Globus::GRAM::JobManager MUST call the super-class's constructor, as in this code fragment: my $proto = shift; my $class = ref($proto) || $proto; my $self = $class->SUPER::new(@_); bless $self, $class; =cut sub new { my $class = shift; my $self = {}; my $description = shift; my $save; my $savedest; $self->{JobDescription} = $description; bless $self, $class; eval { File::Path::mkpath($self->job_dir(), 0, 0700); }; if ($@) { $self->log("Couldn't create job dir"); } $save = $description->save_job_description(); $self->log("Checking to see if we'll save the job description: $save"); if ($save eq 'yes') { $savedest = "$ENV{HOME}/gram_".$description->uniq_id().".pl"; $self->log("Saving job description to $savedest"); $description->save($savedest); } $self->rewrite_urls(); return $self; } sub getenv { my $self = shift; my $varname = shift; my $description = $self->{JobDescription}; my @result; my @environment = $description->environment(); @result = grep { (ref($_) eq 'ARRAY') && ($_->[0] eq $varname) } @environment; if (exists($result[0])) { return $result[0]->[1]; } return undef; } =item $manager->log($string) Log a message to the job manager log file. The message is preceded by a timestamp. =cut sub log { my $self = shift; my $msg = join("", @_); $msg =~ s/\\/\\\\/g; $msg =~ s/\n/\\n/g; $msg =~ s/\"/\\\"/g; $self->respond({LOG => "msg=\"$msg\""}); return; } =item $manager->nfssync($object,$create) Send an NFS update by touching the file (or directory) in question. If the $create is true, a file will be created. If it is false, the $object will not be created. =cut sub nfssync { my $self = shift; my $object = shift; my $create_p = shift; my $now = time(); unless ( utime( $now, $now, $object ) ) { $self->log( "NFS sync for $object failed (may be harmless): $!" ); # object did not exist if ( $create_p ) { local(*TEMP); if ( open( TEMP, ">$object" ) ) { close(TEMP); $self->log( "NFS sync created $object" ); utime($now, $now, $object) || $self->log( "NFS sync still unable to access $object" ); } else { $self->log( "NFS sync could not create $object: $!" ); } } } $self->log( "Sent NFS sync for $object" ); } =item $manager->respond($message) Send a response to the job manager program. The response may either be a hash reference consisting of a hash of (variable, value) pairs, which will be returned to the job manager, or an already formatted string. This only needs to be directly called by a job manager implementation when the script wants to send a partial response while processing one of the scheduler interface methods (for example, to indicate that a file has been staged). The valid keys for a response are defined in the RESPONSES section. =cut sub respond { my $self = shift; my $result = shift; my $var; if(!ref($result)) { print $result; } else { foreach (keys %{$result}) { $var = uc($_); print "GRAM_SCRIPT_$var:" . $result->{$_} . "\n"; } } } =item $manager->submit() Submit a job request to the scheduler. The default implementation returns with the Globus::GRAM::Error::UNIMPLEMENTED error. Scheduler specific subclasses should reimplement this method to submit the job to the scheduler. A scheduler which implements this method should return a hash reference containing a scheduler-specific job identifier as the value of the hash's JOB_ID key, and optionally, the a GRAM job state as the value of the hash's JOB_STATE key if the job submission was successful; otherwise a Globus::GRAM::Error value should be returned. 
The job state values are defined in the Globus::GRAM::JobState module. The job parameters (as found in the job rsl) are defined in the Globus::GRAM::JobDescription object in $self->{JobDescription}. For example: return {JOB_STATE => Globus::GRAM::JobState::PENDING, JOB_ID => $job_id}; =cut sub submit { my $self = shift; $self->log("Job Manager module does not implement 'submit'\n"); return Globus::GRAM::Error::UNIMPLEMENTED; } =item $manager->poll() Poll a job's status. The default implementation returns with the Globus::GRAM::Error::UNIMPLEMENTED error. Scheduler-specific subclasses should reimplement this method to poll the scheduler. A scheduler which implements this method should return a hash reference containing the JOB_STATE value. The job's ID can be accessed by calling the $self->{JobDescription}->jobid() method. =cut sub poll { my $self = shift; $self->log("Job Manager module Script does not implement 'poll'\n"); return Globus::GRAM::Error::UNIMPLEMENTED; } =item $manager->cancel() Cancel a job. The default implementation returns with the Globus::GRAM::Error::UNIMPLEMENTED error. Scheduler-specific subclasses should reimplement this method to remove the job from the scheduler. A scheduler which implements this method should return a hash reference containing the JOB_STATE value. The job's ID can be accessed by calling the $self->{JobDescription}->jobid() method. =cut sub cancel { my $self = shift; $self->log("Job Manager Script does not implement 'cancel'\n"); return Globus::GRAM::Error::UNIMPLEMENTED; } =item $manager->signal() Signal a job. The default implementation returns with the Globus::GRAM::Error::UNIMPLEMENTED error. Scheduler-specific subclasses should reimplement this method to deliver the signal to the job or scheduler. The JobManager module can determine the job's ID, the signal number, and the (optional) signal arguments from the Job Description by calling its job_id(), signal(), and signal_arg() methods, respectively. Depending on the signal, it may be appropriate for the JobManager object to return a hash reference containing a JOB_STATE update. =cut sub signal { my $self = shift; $self->log("Job Manager Script does not implement 'signal'\n"); return Globus::GRAM::Error::UNIMPLEMENTED; } =item $manager->make_scratchdir() Create a scratch directory for a job. The scratch directory location is based on the JobDescription's scratch_dir_base() and scratch_dir() methods. If the scratch_dir() value is a relative path, then a directory will be created as a subdirectory of scratch_dir_base()/scratch_dir(), otherwise, it will be created as a subdirectory of scratch_dir(). This method will return a hash reference mapping SCRATCH_DIR to the absolute path of the newly created scratch directory if successful. =cut sub make_scratchdir { my $self = shift; my $description = $self->{JobDescription}; my $created = 0; my $tmpnam; my $dirname; my $scratch_prefix; my $scratch_suffix; my @acceptable=split(//, "abcdefghijklmnopqrstuvwxyz". "ABCDEFGHIJKLMNOPQRSTUVWXYZ". "0123456789"); srand(); $self->log( "Entering Job Manager default implementation of make_scratchdir"); $scratch_prefix = $description->scratch_dir_base(); $scratch_suffix = $description->scratch_dir(); if($scratch_suffix =~ m,^/,,) { $scratch_prefix = $scratch_suffix; } elsif ($scratch_suffix !~ m,/$,,) { $scratch_prefix .= "/$scratch_suffix"; } else { $scratch_prefix .= $scratch_suffix; } if(! -w $scratch_prefix) { return Globus::GRAM::Error::INVALID_SCRATCH; }
my $Loops = 0; while( (!$created) && ($Loops++ < 100) ) { # Files with names comprised of Ascii values 48-122 should be # relatively easy to remove from the shell if things go bad. my $tmpname = 'gram_scratch_' . $acceptable[rand() * $#acceptable] . $acceptable[rand() * $#acceptable] . $acceptable[rand() * $#acceptable] . $acceptable[rand() * $#acceptable] . $acceptable[rand() * $#acceptable] . $acceptable[rand() * $#acceptable] . $acceptable[rand() * $#acceptable] . $acceptable[rand() * $#acceptable] . $acceptable[rand() * $#acceptable] . $acceptable[rand() * $#acceptable]; $dirname = "$scratch_prefix/$tmpname"; $self->log( "Trying to create directory named $dirname"); $created = mkdir($dirname, 0700); if($created) { $self->nfssync( $dirname, 0 ); $self->log("I think it was made.... verifying"); if (-l $dirname || ! -d $dirname || ! -o $dirname) { $self->log("nope, somebody's messing with us."); $created = 0; } } elsif( $!{EEXIST} ) { $self->log("Already exists; trying again"); } else { last; } } # We give up if (!$created) { return Globus::GRAM::Error::INVALID_SCRATCH; } $self->log("Using $dirname as the scratch directory for this job."); return {SCRATCH_DIR => $dirname}; } =item $manager->remove_scratchdir() Delete a job's scratch directory. All files and subdirectories of the JobDescription's scratch_directory() will be deleted. =cut sub remove_scratchdir { my $self = shift; my $description = $self->{JobDescription}; my $scratch_directory; my $count; $scratch_directory = $description->scratch_directory(); $self->log( "Entering Job Manager default implementation of remove_scratchdir"); if (! defined $scratch_directory ) { $self->log("Scratch directory not defined"); return {}; } $self->log("Removing $scratch_directory"); chdir("/"); $count = File::Path::rmtree($scratch_directory); $self->log("Removed $count files"); return {}; } =item $manager->file_cleanup() Delete some job-related files. All files listed in the JobDescription's file_cleanup() array will be deleted. =cut sub file_cleanup { my $self = shift; my $description = $self->{JobDescription}; my $count; $self->log( "Entering Job Manager default implementation of file_cleanup"); foreach ($description->file_cleanup()) { if(!defined($_)) { next; } if(ref($_)) { return Globus::GRAM::Error::RSL_FILE_CLEANUP(); } $self->log("Removing $_"); unlink($_); } return {}; } =item $manager->rewrite_urls() Look up URLs listed in the JobDescription's stdin() and executable() attributes, and replace them with paths to locally cached copies. =cut sub rewrite_urls { my $self = shift; my $description = $self->{JobDescription}; my $tag = $description->cache_tag() || $ENV{'GLOBUS_GRAM_JOB_CONTACT'}; my $cache_location = $self->getenv('GLOBUS_GASS_CACHE_DEFAULT'); my $url; my $filename; my $filestreamout = []; local %ENV = %ENV; $ENV{GLOBUS_GASS_CACHE_DEFAULT} = $cache_location if ($cache_location); foreach ('stdin', 'executable') { chomp($url = $description->$_()); if($url =~ m|^[a-zA-Z]+://|) { my @arg = ($cache_pgm, '-query', '-t', $tag, $url); $filename = $self->pipe_out_cmd(@arg); if($filename ne '') { $description->add($_, $filename); } } } foreach my $which ('stdout', 'stderr') { my @destinations = $description->$which(); my $first_destination = $destinations[0]; my $first_tag = undef; my $cached_destination = $self->job_dir() .
"/$which"; if (ref($first_destination)) { if (scalar(@{$first_destination}) == 2) { $first_tag = $first_destination->[1]; $first_destination = $first_destination->[0]; $self->log("destination = $first_destination, tag is $first_tag"); } else { $first_destination = $first_destination->[0]; $self->log("destination = $first_destination, tag is not present"); } } elsif (scalar(@destinations) == 2) { $first_tag = $destinations[1]; $self->log("destination is $first_destination, tag is $first_tag"); @destinations = ($first_destination); } if (scalar(@destinations) == 1 && $first_destination !~ m|://|) { $description->add($which, $first_destination); next; } elsif (scalar(@destinations) == 1 && $first_destination =~ m|x-gass-cache://|) { my @arg = ($cache_pgm, '-add', '-t', $tag, '-n', $first_destination, 'file:///dev/null'); if (!defined($first_tag)) { $self->log("$which goes to $first_destination in cache"); $self->log("command is " . join(" ", @arg)); $filename = $self->pipe_out_cmd(@arg); if ($! != 0) { $self->log("Error adding to cache $!"); } $first_tag = $tag; } @arg = ($cache_pgm, '-query', '-t', $first_tag, $first_destination); $filename = $self->pipe_out_cmd(@arg); if($filename ne '') { $self->log("$first_destination in cache resolves to $filename"); $description->add($which, $filename); } else { $self->log("$first_destination in cache did not resolve!"); } } else { $description->add($which, $cached_destination); } } return {}; } =item $manager->stage_in() Stage input files need for the job from remote storage. The files to be staged are defined by the array of [URL, path] pairs in the job description's file_stage_in() and file_stage_in_shared() methods. The Globus::GRAM::JobManager module provides an implementation of this functionality using the globus-url-copy and globus-gass-cache programs. Files which are staged in are not automatically removed when the job terminates. This function returns intermediate responses using the Globus::GRAM::JobManager::response() method to let the job manager know when each individual file has been staged. 
=cut sub stage_in { my $self = shift; my $description = $self->{JobDescription}; my $tag = $description->cache_tag() || $ENV{'GLOBUS_GRAM_JOB_CONTACT'}; my $cache_location = $self->getenv('GLOBUS_GASS_CACHE_DEFAULT'); my ($remote, $local, $local_resolved, $cached, $stderr, $rc, @arg); $self->log("stage_in(enter)"); local %ENV = %ENV; $ENV{GLOBUS_GASS_CACHE_DEFAULT} = $cache_location if ($cache_location); if($description->executable() =~ m|^[a-zA-Z]+://|) { @arg = ($cache_pgm, '-add', '-t', $tag, $description->executable()); ($stderr, $rc) = $self->pipe_out_cmd(@arg); if ($rc != 0) { $self->log("executable staging failed with $stderr"); $self->respond( { 'GT3_FAILURE_TYPE' => 'executable', 'GT3_FAILURE_MESSAGE' => $stderr, 'GT3_FAILURE_SOURCE' => $description->executable() }); return Globus::GRAM::Error::STAGING_EXECUTABLE; } $local = $self->pipe_out_cmd($cache_pgm, '-q', '-t', $tag, $description->executable()); if ($local eq '') { $self->respond( { 'GT3_FAILURE_TYPE' => 'executable', 'GT3_FAILURE_MESSAGE' => $stderr, 'GT3_FAILURE_SOURCE' => $description->executable() }); return Globus::GRAM::Error::STAGING_EXECUTABLE; } $self->nfssync($local, 0); } if($description->stdin() =~ m|^[a-zA-Z]+://|) { @arg = ($cache_pgm, '-add', '-t', $tag, $description->stdin()); ($stderr, $rc) = $self->pipe_err_cmd(@arg); if ($rc != 0) { $self->log("stdin staging failed with $stderr"); $self->respond( { 'GT3_FAILURE_TYPE' => 'stdin', 'GT3_FAILURE_MESSAGE' => $stderr, 'GT3_FAILURE_SOURCE' => $description->stdin() }); return Globus::GRAM::Error::STAGING_STDIN } $local = $self->pipe_out_cmd($cache_pgm, '-q', '-t', $tag, $description->stdin()); if ($local eq '') { $self->respond( { 'GT3_FAILURE_TYPE' => 'stdin', 'GT3_FAILURE_MESSAGE' => $stderr, 'GT3_FAILURE_SOURCE' => $description->stdin() }); return Globus::GRAM::Error::STAGING_STDIN; } $self->nfssync($local, 0); } foreach ($description->file_stage_in()) { next unless defined $_; ($remote, $local) = ($_->[0], $_->[1]); if($local !~ m|^/|) { $local_resolved = $description->directory() . '/' . $local; } else { $local_resolved = $local; } @arg = ($url_copy_pgm, $remote, 'file://' . $local_resolved); ($stderr, $rc) = $self->pipe_err_cmd(@arg); if($rc != 0) { $self->log("filestagein staging failed with $stderr"); $self->respond( { 'GT3_FAILURE_TYPE' => 'filestagein', 'GT3_FAILURE_MESSAGE' => $stderr, 'GT3_FAILURE_SOURCE' => $remote, 'GT3_FAILURE_DESTINATION' => $local }); return Globus::GRAM::Error::STAGE_IN_FAILED } $self->nfssync($local_resolved, 0); $self->respond({'STAGED_IN' => "$remote $local"}); } foreach($description->file_stage_in_shared()) { next unless defined $_; ($remote, $local) = ($_->[0], $_->[1]); if($local !~ m|^/|) { $local_resolved = $description->directory() . '/' . 
$local; } else { $local_resolved = $local; } @arg = ($cache_pgm, '-add', '-t', $tag, $remote); ($stderr, $rc) = $self->pipe_err_cmd(@arg); if($rc != 0) { $self->log("filestagein staging failed with $stderr"); $self->respond( { 'GT3_FAILURE_TYPE' => 'filestagein', 'GT3_FAILURE_MESSAGE' => $stderr, 'GT3_FAILURE_SOURCE' => $remote, 'GT3_FAILURE_DESTINATION' => $local }); return Globus::GRAM::Error::STAGE_IN_FAILED } @arg = ($cache_pgm, '-query', '-t', $tag, $remote); $cached = $self->pipe_out_cmd(@arg); return Globus::GRAM::Error::STAGE_IN_FAILED if($cached eq ''); symlink($cached, $local_resolved); $self->respond({'STAGED_IN_SHARED' => "$remote $local"}); $self->log( "local=$local" ); $self->log( "local_resolved=$local_resolved" ); $self->nfssync( $local_resolved, 0 ); } $self->log("stage_in(exit)"); return {}; } =item $manager->stage_out() Stage output files generated by this job to remote storage. The files to be staged are defined by the array of [URL, destination] pairs in the job description's file_stage_out() method. The Globus::GRAM::JobManager module provides an implementation of this functionality using the globus-url-copy program. Files which are staged out are not removed by this method. =cut sub stage_out { my $self = shift; my $description = $self->{JobDescription}; my $url_copy = "$Globus::Core::Paths::bindir/globus-url-copy"; my $tag = $description->cache_tag() || $ENV{'GLOBUS_GRAM_JOB_CONTACT'}; my $cache_location = $self->getenv('GLOBUS_GASS_CACHE_DEFAULT'); my ($local, $remote); my ($local_path, $remote_path); my ($stderr, $rc); my @arg; $self->log("stage_out(enter)"); local %ENV = %ENV; $ENV{GLOBUS_GASS_CACHE_DEFAULT} = $cache_location if ($cache_location); if (exists($self->{STDIO_MERGER}) && ref($self->{STDIO_MERGER})) { my $merger = $self->{STDIO_MERGER}; $merger->poll(1); } $self->nfssync( $description->stdout(), 0 ) if defined $description->stdout(); $self->nfssync( $description->stderr(), 0 ) if defined $description->stderr(); foreach ($description->file_stream_out()) { next unless defined $_; ($local, $remote) = ($_->[0], $_->[1]); $self->log("Staging $local to $remote"); # handle a couple of types of URLs for local files $local_path = $local; if($local_path =~ m|^x-gass-cache://|) { @arg = ($cache_pgm, '-query', '-t', $tag, $local_path); $local_path = $self->pipe_out_cmd(@arg); return Globus::GRAM::Error::STAGE_OUT_FAILED if($local_path eq ''); } elsif($local_path =~ m|^file:/|) { $local_path =~ s|^file:/+|/|; } if($local_path !~ m|^/|) { $local_path = $description->directory() . '/' . $local; } $self->nfssync($local_path, 0); $remote_path = $remote; if ($remote_path =~ m|^x-gass-cache://|) { my $msg; @arg = ($cache_pgm, '-add', '-n', $remote_path, '-t', $tag, 'file:///dev/null'); $msg = $self->pipe_out_cmd(@arg); if ($? != 0) { $self->log("Failed creating cache entry for $remote_path $msg"); return Globus::GRAM::Error::STAGE_OUT_FAILED; } @arg = ($cache_pgm, '-query', '-t', $tag, $remote_path); $remote_path = $self->pipe_out_cmd(@arg); if ($? != 0) { $self->log("Query of cache URL path returned $? -- {$remote_path}"); } return Globus::GRAM::Error::STAGE_OUT_FAILED if ($remote_path eq ''); $remote_path = "file://$remote_path"; } @arg = ($url_copy_pgm, 'file://' . $local_path, $remote_path); $self->log("Staging $local to $remote"); $self->log("Concretely, staging $local_path to $remote_path"); $self->log("with command " . 
join(" ", @arg)); ($stderr, $rc) = $self->pipe_err_cmd(@arg); if($rc != 0) { $self->log("filestageout staging failed with $stderr"); $self->respond( { 'GT3_FAILURE_TYPE' => 'filestageout', 'GT3_FAILURE_MESSAGE' => $stderr, 'GT3_FAILURE_SOURCE' => $local, 'GT3_FAILURE_DESTINATION' => $remote }); return Globus::GRAM::Error::STAGE_OUT_FAILED } $self->respond({'STAGED_STREAM' => "$local $remote"}); } foreach ($description->file_stage_out()) { next unless defined $_; ($local, $remote) = ($_->[0], $_->[1]); # handle a couple of types of URLs for local files $local_path = $local; if($local_path =~ m|^x-gass-cache://|) { @arg = ($cache_pgm, '-query', '-t', $tag, $local_path); $local_path = $self->pipe_out_cmd(@arg); return Globus::GRAM::Error::STAGE_OUT_FAILED if($local_path eq ''); } elsif($local_path =~ m|^file:/|) { $local_path =~ s|^file:/+|/|; } if($local_path !~ m|^/|) { $local_path = $description->directory() . '/' . $local; } $self->nfssync($local_path, 0); @arg = ($url_copy_pgm, 'file://' . $local_path, $remote); ($stderr, $rc) = $self->pipe_err_cmd(@arg); if($rc != 0) { $self->log("filestageout staging failed with $stderr"); $self->respond( { 'GT3_FAILURE_TYPE' => 'filestageout', 'GT3_FAILURE_MESSAGE' => $stderr, 'GT3_FAILURE_SOURCE' => $local, 'GT3_FAILURE_DESTINATION' => $remote }); return Globus::GRAM::Error::STAGE_OUT_FAILED } $self->respond({'STAGED_OUT' => "$local $remote"}); } $self->log("stage_out(exit)"); return {}; } =item $manager->cache_cleanup() Clean up cache references in the GASS which match this job's cache tag . =cut sub cache_cleanup { my $self = shift; my $description = $self->{JobDescription}; my $cache_location = $self->getenv('GLOBUS_GASS_CACHE_DEFAULT'); my $tag = $description->cache_tag() || $ENV{'GLOBUS_GRAM_JOB_CONTACT'}; my $job_path = $self->job_dir(); my ($stderr, $rc); $self->log("cache_cleanup(enter)"); local %ENV = %ENV; $ENV{GLOBUS_GASS_CACHE_DEFAULT} = $cache_location if ($cache_location); if ( defined $tag ) { ($stderr, $rc) = $self->pipe_err_cmd($cache_pgm, '-cleanup-tag', '-t', $tag); } $self->log("Cleaning files in job dir $job_path"); chdir("/"); my $count = File::Path::rmtree($job_path); $self->log("Removed $count files from $job_path"); if ($rc != 0) { $self->log("cache cleanup failed with $stderr"); } $self->log("cache_cleanup(exit)"); return {}; } =item $manager->remote_io_file_create() Create the remote I/O file in the job dir which will contain the remote_io_url RSL attribute's value. =cut sub remote_io_file_create { my $self = shift; my $description = $self->{JobDescription}; my $tag = $description->cache_tag() || $ENV{'GLOBUS_GRAM_JOB_CONTACT'}; my $job_path = $self->job_dir(); my $filename = "$job_path/remote_io_url"; $self->log("remote_io_file_create(enter)"); local(*FH); open(FH, ">$filename"); print FH $description->remote_io_url . "\n"; close(FH); $self->nfssync($filename, 0); $self->log("remote_io_file_create(exit)"); return { REMOTE_IO_FILE => $filename }; } =item $manager->proxy_relocate() Relocate the delegated proxy for job execution. Job Managers need to override the default if they intend to relocate the proxy into some common file system other than the cache. The job manager program does not depend on the new location of the proxy. Job Manager modules must not remove the default proxy. 
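A scheduler module that needs the proxy on some shared filesystem might override this method along the following lines; this is only a sketch, and the destination directory shown is hypothetical and site-specific.

    sub proxy_relocate
    {
        my $self = shift;

        # Locate the default proxy via the base implementation.
        my $result = $self->SUPER::proxy_relocate();
        return $result if UNIVERSAL::isa($result, 'Globus::GRAM::Error');

        my $old_proxy = $result->{X509_USER_PROXY};
        my $new_proxy = '/shared/scratch/x509up_'
                      . $self->{JobDescription}->uniq_id();

        # Copy, never move: the default proxy must stay in place.
        File::Copy::copy($old_proxy, $new_proxy)
            || return Globus::GRAM::Error::OPENING_USER_PROXY;
        chmod(0600, $new_proxy);

        return { X509_USER_PROXY => $new_proxy };
    }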
=cut sub proxy_relocate { my $self = shift; my $description = $self->{JobDescription}; my $proxy_filename; my $proxy_data; my $new_proxy; $self->log("proxy_relocate(enter)"); $proxy_filename = $self->pipe_out_cmd($info_pgm, '-path'); return Globus::GRAM::Error::OPENING_USER_PROXY if ( $? != 0 || $proxy_filename eq '' ); return { X509_USER_PROXY => $proxy_filename } } =item $hashref = $manager->proxy_update(); =cut sub proxy_update { return {}; } =item $manager->append_path($ref, $var, $path) Append $path to the value of $ref->{$var}, dealing with the case where $ref->{$var} is not yet defined. =cut sub append_path { my $self = shift; my $ref = shift; my $var = shift; my $path = shift; if(exists($ref->{$var})) { $ref->{$var} .= ":$path"; } else { $ref->{$var} = "$path"; } } =item $manager->pipe_out_cmd(@arg) Create a new process to run the first argument application with the remaining arguments (which may be empty). No shell metacharacters are evaluated, avoiding a shell invocation. Stderr is redirected to /dev/null and stdout is captured by the parent process and returned as the result. In list context all lines are returned; in scalar context only the first line is returned. The line termination character is already cut off. Use this function as a more efficient alternative to backticks if you do not need shell metacharacter evaluation. Caution: this function deviates from regular backticks in two ways. Firstly, it chomps the line terminator from the output. Secondly, in scalar context it returns only the first line instead of a multiline concatenated string. As with regular backticks, the result may be undefined in scalar context if no result exists. A child exit code of 127 indicates that the application could not be run. The scalar result returned by this function is usually undef in this case. =cut sub pipe_out_cmd { my $self = shift; my @result; local(*READ); my $pid = open( READ, "-|" ); return undef unless defined $pid; if ( $pid ) { # parent chomp(@result = <READ>); close(READ); } else { # child open( STDERR, '>>/dev/null' ); select(STDERR); $|=1; select(STDOUT); $|=1; if (! exec { $_[0] } @_ ) { exit(127); } } wantarray ? @result : $result[0]; } =item ($stderr, $rc) = $manager->pipe_err_cmd(@arg) Create a new process to run the first argument application with the remaining arguments (which may be empty). No shell metacharacters are evaluated, avoiding a shell invocation. This method returns a list of two items: the standard error of the program and the exit code of the program. If the exit code is 127, then the application could not be run. Standard output is discarded. =cut sub pipe_err_cmd { my $self = shift; my $result; local(*READ); my $pid = open( READ, "-|" ); return ("Error " . $! . " forking sub-process", -1) unless defined($pid); if ( $pid ) { # parent chomp($result = scalar <READ>); close(READ); } else { # child open( STDERR, '>&STDOUT'); open( STDOUT, '>>/dev/null' ); select(STDERR); $|=1; select(STDOUT); $|=1; if (! exec { $_[0] } @_ ) { exit(127); } } ($result, $?); } =item $manager->fork_and_exec_cmd(@arg) Fork off a child to run the first argument in the list. Remaining arguments will be passed, but shell interpolation is avoided. Signals SIGINT and SIGQUIT are ignored in the child process. Stdout is appended to /dev/null, and stderr is dup2()'d from stdout. The parent waits for the child to finish, and returns the value of the CHILD_ERROR variable as its result.
Use this function as more efficient system() call, if you can do not need shell metacharacter evaluation. Note that the inability to execute the program will result in a status code of 127. =cut sub fork_and_exec_cmd { my $self = shift; my $pid = fork(); return undef unless defined $pid; if ( $pid == 0 ) { # child $SIG{INT} = 'IGNORE'; $SIG{QUIT} = 'IGNORE'; # FIXME: what about blocking SIGCHLD? open STDOUT, '>>/dev/null'; open STDERR, '>&STDOUT'; # dup2() exec { $_[0] } @_; exit 127; } # parent waitpid($pid,0); # FIXME: deal with EINTR and EAGAIN $?; } =item $manager->job_dir() Return the temporary directory to store job-related files, which have no need for file caching. =cut sub job_dir { my $self = shift; my $description = $self->{JobDescription}; my $posix_hostname; my $job_dir = $description->job_dir(); if ($job_dir ne '') { $self->log("Using jm supplied job dir: $job_dir"); return $job_dir; } elsif (exists $ENV{GLOBUS_HOSTNAME}) { $posix_hostname = $ENV{GLOBUS_HOSTNAME}; } else { $posix_hostname = (POSIX::uname)[1]; if ($posix_hostname !~ m/\./) { my $aliases = join(' ',(gethostbyname($posix_hostname))[0,1]); for my $alias (split(/\s+/, $aliases)) { if ($alias =~ m/\./) { $posix_hostname = $alias; last; } } } } $job_dir = $ENV{HOME}."/.globus/job/$posix_hostname/".$description->uniq_id(); $self->log("making my own job dir @ $job_dir"); return $job_dir; } =item $manager->setup_softenv() Either add a line to the specified command script file handle to load the user's default SoftEnv configuration, or create a custom SoftEnv script and add commands to the specified command script file handle to load it. =cut sub setup_softenv { my $self = shift; my $softenv_script_name = shift; my $soft_msc = shift; my $softenv_load = shift; my $job_script_fh = shift; my $rc; my $description = $self->{JobDescription}; my @softenv = $description->softenv(); my $enable_default_software_environment = $description->enable_default_software_environment(); if ((not @softenv) && (not $enable_default_software_environment)) { return 0; } if ((not @softenv) && $enable_default_software_environment) { $self->log("default software environment requested"); #load default software environment $rc = print $job_script_fh ". $softenv_load\n"; if (!$rc) { return "print failed: $job_script_fh: $!"; } } else { $self->log("custom software environment requested"); local(*SOFTENV); $rc = open(SOFTENV, '>' . $softenv_script_name); if (!$rc) { return "open failed: $softenv_script_name: $!"; } foreach my $softenv (@softenv) { print SOFTENV $softenv . "\n"; } close(SOFTENV); $rc = print $job_script_fh "$soft_msc $softenv_script_name\n" . ". $softenv_script_name.cache.sh\n" . "rm $softenv_script_name" . " $softenv_script_name.cache.sh\n"; if (!$rc) { return "print failed: $job_script_fh: $!"; } } return 0; } # Transforms a string so that shell metacharacters are escaped and # suitable for use in double quotes sub shell_escape($) { my $self = shift; my $string = shift; $string =~ s/\\/\\\\/g; $string =~ s/\$/\\\$/g; $string =~ s/"/\\\"/g; $string =~ s/`/\\\`/g; return $string; } sub job_description_class { return 'Globus::GRAM::JobDescription'; } 1; =back =head1 RESPONSES When returning from a job interface method, or when sending an intermediate response via the I() method, the following hash keys are valid: =over 4 =item * JOB_STATE An integer job state value. These are enumerated in the Globus::GRAM::JobState module. =item * ERROR An integer error code. These are enumerated in the Globus::GRAM::Error module. 
=item * JOB_ID A string containing a job identifier, which can be used to poll, cancel, or signal a job in progress. This response should only be returned by the I method. =item * SCRATCH_DIR A string containing the path to a newly-created scratch directory. This response should only be returned by the I method. =item * STAGED_IN A string containing the (URL, path) pair for a file which has now been staged in. This response should only be returned by the I method. =item * STAGED_IN_SHARED A string containing the (URL, path) pair for a file which has now been staged in and symlinked from the cache. This response should only be returned by the I method. =item * STAGED_OUT A string containing the (path, URL) pair for a file which has now been staged out by the script. This response should only be returned by the I method. =back =cut # vim: filetype=perl : globus_gram_job_manager_scripts-7.3/PaxHeaders.20921/globus-job-manager-script.pl.in0000644000000000000000000000013213765226605026704 xustar000000000000000030 mtime=1607806341.384897731 30 atime=1607806957.732897731 30 ctime=1607807072.916897731 globus_gram_job_manager_scripts-7.3/globus-job-manager-script.pl.in0000664000372000037200000001530213765226605026615 0ustar00travistravis00000000000000#! /usr/bin/perl # Copyright 1999-2006 University of Chicago # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # Replace this with your command filtering programing of choice. # A return value of '0' indicates that the command will be allowed, and # any other return value indicates that the command will be denied. #$FILTER_COMMAND = '/usr/local/bin/commsh --check-user'; my $prefix; my $exec_prefix; my $sbindir; my $bindir; my $includedir; my $datarootdir; my $datadir; my $libdir; my $libexecdir; my $sysconfdir; my $sharedstatedir; my $localstatedir; my $perlmoduledir; BEGIN { $prefix = "@prefix@"; $prefix = $ENV{GLOBUS_LOCATION} if $ENV{GLOBUS_LOCATION}; $exec_prefix = "@exec_prefix@"; $sbindir = "@sbindir@"; $bindir = "@bindir@"; $includedir = "@includedir@"; $datarootdir = "@datarootdir@"; $datadir = "@datadir@"; $libdir = "@libdir@"; $libexecdir = "@libexecdir@"; $sysconfdir = "@sysconfdir@"; $sharedstatedir = "@sharedstatedir@"; $localstatedir = "@localstatedir@"; $perlmoduledir = "@perlmoduledir@"; unshift (@INC, $perlmoduledir); } my $library_map = { 'linux' => [ 'LD_LIBRARY_PATH'], 'hpux' => [ 'SHLIB_PATH', 'LD_LIBRARY_PATH' ], 'solaris' => [ 'LD_LIBRARY_PATH', 'LD_LIBRARY_PATH_64' ], 'aix' => [ 'LIBPATH' ], 'irix' => [ 'LD_LIBRARY_PATH', 'LD_LIBRARYN32_PATH', 'LD_LIBRARY64_PATH' ], 'darwin' => [ 'DYLD_LIBRARY_PATH' ], 'freebsd' => [ 'LD_LIBRARY_PATH' ], 'openbsd' => [ 'LD_LIBRARY_PATH' ] }; use Getopt::Long; use Globus::GRAM::Error; use Globus::GRAM::JobDescription; eval "require Globus::GRAM::JobManagerGratia"; # Gratia patch to save DN/FQAN on submit my $gratia_callout = $@?0:1; # Don't make the callout if the file is not there. 
my($manager_name, $argument_file, $command, $help); GetOptions('manager-name|m=s' => \$manager_name, 'argument-file|f=s' => \$argument_file, 'command|c=s' => \$command, 'libdir|l=s' => \$libdir, "help|h" => \$help); $|=1; if ($help) { my $managers; print "USAGE: $0 -m MANAGER -f FILE -c COMMAND\n"; print "Installed managers:"; foreach (<$perlmoduledir/Globus/GRAM/JobManager/*.pm>) { my $manager = $_; chomp($manager); $manager =~ s|.*/||; $manager =~ s|\.pm$||; print " $manager"; } print "\n"; exit(0); } if ($libdir) { foreach my $libvar (@{$library_map->{$^O}}) { &append_path(\%ENV, $libvar, $libdir); } } if (!defined($manager_name)) { &fail(Globus::GRAM::Error::BAD_SCRIPT_ARG_FILE); } elsif (!defined($command)) { &fail(Globus::GRAM::Error::BAD_SCRIPT_ARG_FILE); } elsif ($command ne 'interactive' && !defined($argument_file)) { &fail(Globus::GRAM::Error::BAD_SCRIPT_ARG_FILE); } my $manager_class = "Globus::GRAM::JobManager::$manager_name"; eval "require $manager_class"; if ($command eq 'interactive') { my $input = ''; my $icmd = ''; my $line; while ($line = <>) { if ($icmd eq '') { chomp($icmd = $line); if ($icmd eq 'quit') { exit(0); } next; } elsif ($line eq "\n") { # End of input my $jd = eval $input; if ($@) { &fail(Globus::GRAM::Error::BAD_SCRIPT_ARG_FILE); } my $job_description_class = $manager_class->job_description_class(); $job_description = new $job_description_class($jd); $manager = new $manager_class($job_description) or &fail(Globus::GRAM::Error::BAD_SCRIPT_ARG_FILE); &run_command($icmd, $manager); $input = ''; $icmd = ''; } else { $input .= $line; } } } else { my $error_string = ''; if($@) { $error_string = $@; warn $error_string; } my $job_description_class = $manager_class->job_description_class(); $job_description = new $job_description_class($argument_file); if ($error_string) { my $msg = "Error loading $manager_class: $error_string"; $msg =~ s/\\/\\\\/g; $msg =~ s/\n/\\n/g; $msg =~ s/\"/\\\"/g; print "GRAM_SCRIPT_LOG:msg=\"$msg\"\n"; &fail(Globus::GRAM::Error::BAD_SCRIPT_ARG_FILE); } $manager = new $manager_class($job_description) or &fail(Globus::GRAM::Error::BAD_SCRIPT_ARG_FILE); run_command($command, $manager); } sub run_command { my ($cmd, $manager) = @_; # If we are submitting a job, we may need to update things like # executable & stdin to look in the cache. # We may also need to run the command through a filter script. if($cmd eq 'submit') { if(defined $FILTER_COMMAND) { my $commandName = join(" ", $job_description->executable, $job_description->arguments); my @filterArgs = split(/\s+/, $FILTER_COMMAND); if(-x $filterArgs[0]) # Make sure program is executable { my $rVal = (system(@filterArgs, $commandName)) >> 8; if($rVal != 0) # The filter command returned an error, so deny. { &fail(Globus::GRAM::Error::AUTHORIZATION_DENIED_EXECUTABLE); goto FILTERED; } } } } $result = $manager->$cmd(); if(UNIVERSAL::isa($result, 'Globus::GRAM::Error')) { &fail($result); } else { $manager->respond($result); # Save cert information for Gratia accounting if appropriate. 
if ($gratia_callout and ($cmd eq 'submit') and (exists $result->{JOB_ID})) { Globus::GRAM::JobManagerGratia::gratia_save_cert_info($manager, $result->{JOB_ID}); } } FILTERED: if ($command eq 'interactive') { print "\n"; } } sub fail { my $error = shift; print 'GRAM_SCRIPT_ERROR:', $error->value(), "\n"; exit(1) if ($command ne 'interactive'); } sub append_path { my $ref = shift; my $var = shift; my $path = shift; if(exists($ref->{$var})) { $ref->{$var} .= ":$path"; } else { $ref->{$var} = $path; } } globus_gram_job_manager_scripts-7.3/PaxHeaders.20921/globus-gram-job-manager-scripts.pc.in0000644000000000000000000000013213765226605030002 xustar000000000000000030 mtime=1607806341.384897731 30 atime=1607806957.704897731 30 ctime=1607807072.916897731 globus_gram_job_manager_scripts-7.3/globus-gram-job-manager-scripts.pc.in0000664000372000037200000000031213765226605027706 0ustar00travistravis00000000000000prefix=@prefix@ exec_prefix=@exec_prefix@ libdir=@libdir@ includedir=@includedir@ Name: globus-gram-job-manager-scripts Description: Grid Community Toolkit - GRAM Job ManagerScripts Version: @VERSION@ globus_gram_job_manager_scripts-7.3/PaxHeaders.20921/dirt.sh0000644000000000000000000000013213765227001022252 xustar000000000000000030 mtime=1607806465.984897731 30 atime=1607806473.696897731 30 ctime=1607807072.912897731 globus_gram_job_manager_scripts-7.3/dirt.sh0000644000372000037200000000005313765227001022156 0ustar00travistravis00000000000000DIRT_TIMESTAMP=1607703417 DIRT_BRANCH_ID=0 globus_gram_job_manager_scripts-7.3/PaxHeaders.20921/aclocal.m40000644000000000000000000000013213765227400022617 xustar000000000000000030 mtime=1607806720.692897731 30 atime=1607806720.800897731 30 ctime=1607807072.912897731 globus_gram_job_manager_scripts-7.3/aclocal.m40000644000372000037200000006442013765227400022533 0ustar00travistravis00000000000000# generated automatically by aclocal 1.13.4 -*- Autoconf -*- # Copyright (C) 1996-2013 Free Software Foundation, Inc. # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY, to the extent permitted by law; without # even the implied warranty of MERCHANTABILITY or FITNESS FOR A # PARTICULAR PURPOSE. m4_ifndef([AC_CONFIG_MACRO_DIRS], [m4_defun([_AM_CONFIG_MACRO_DIRS], [])m4_defun([AC_CONFIG_MACRO_DIRS], [_AM_CONFIG_MACRO_DIRS($@)])]) m4_ifndef([AC_AUTOCONF_VERSION], [m4_copy([m4_PACKAGE_VERSION], [AC_AUTOCONF_VERSION])])dnl m4_if(m4_defn([AC_AUTOCONF_VERSION]), [2.69],, [m4_warning([this file was generated for autoconf 2.69. You have another version of autoconf. It may work, but is not guaranteed to. If you have problems, you may need to regenerate the build system entirely. To do so, use the procedure documented by the package, typically 'autoreconf'.])]) # Copyright (C) 2002-2013 Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # AM_AUTOMAKE_VERSION(VERSION) # ---------------------------- # Automake X.Y traces this macro to ensure aclocal.m4 has been # generated from the m4 files accompanying Automake X.Y. # (This private macro should not be called outside this file.) 
AC_DEFUN([AM_AUTOMAKE_VERSION], [am__api_version='1.13' dnl Some users find AM_AUTOMAKE_VERSION and mistake it for a way to dnl require some minimum version. Point them to the right macro. m4_if([$1], [1.13.4], [], [AC_FATAL([Do not call $0, use AM_INIT_AUTOMAKE([$1]).])])dnl ]) # _AM_AUTOCONF_VERSION(VERSION) # ----------------------------- # aclocal traces this macro to find the Autoconf version. # This is a private macro too. Using m4_define simplifies # the logic in aclocal, which can simply ignore this definition. m4_define([_AM_AUTOCONF_VERSION], []) # AM_SET_CURRENT_AUTOMAKE_VERSION # ------------------------------- # Call AM_AUTOMAKE_VERSION and AM_AUTOMAKE_VERSION so they can be traced. # This function is AC_REQUIREd by AM_INIT_AUTOMAKE. AC_DEFUN([AM_SET_CURRENT_AUTOMAKE_VERSION], [AM_AUTOMAKE_VERSION([1.13.4])dnl m4_ifndef([AC_AUTOCONF_VERSION], [m4_copy([m4_PACKAGE_VERSION], [AC_AUTOCONF_VERSION])])dnl _AM_AUTOCONF_VERSION(m4_defn([AC_AUTOCONF_VERSION]))]) # AM_AUX_DIR_EXPAND -*- Autoconf -*- # Copyright (C) 2001-2013 Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # For projects using AC_CONFIG_AUX_DIR([foo]), Autoconf sets # $ac_aux_dir to '$srcdir/foo'. In other projects, it is set to # '$srcdir', '$srcdir/..', or '$srcdir/../..'. # # Of course, Automake must honor this variable whenever it calls a # tool from the auxiliary directory. The problem is that $srcdir (and # therefore $ac_aux_dir as well) can be either absolute or relative, # depending on how configure is run. This is pretty annoying, since # it makes $ac_aux_dir quite unusable in subdirectories: in the top # source directory, any form will work fine, but in subdirectories a # relative path needs to be adjusted first. # # $ac_aux_dir/missing # fails when called from a subdirectory if $ac_aux_dir is relative # $top_srcdir/$ac_aux_dir/missing # fails if $ac_aux_dir is absolute, # fails when called from a subdirectory in a VPATH build with # a relative $ac_aux_dir # # The reason of the latter failure is that $top_srcdir and $ac_aux_dir # are both prefixed by $srcdir. In an in-source build this is usually # harmless because $srcdir is '.', but things will broke when you # start a VPATH build or use an absolute $srcdir. # # So we could use something similar to $top_srcdir/$ac_aux_dir/missing, # iff we strip the leading $srcdir from $ac_aux_dir. That would be: # am_aux_dir='\$(top_srcdir)/'`expr "$ac_aux_dir" : "$srcdir//*\(.*\)"` # and then we would define $MISSING as # MISSING="\${SHELL} $am_aux_dir/missing" # This will work as long as MISSING is not called from configure, because # unfortunately $(top_srcdir) has no meaning in configure. # However there are other variables, like CC, which are often used in # configure, and could therefore not use this "fixed" $ac_aux_dir. # # Another solution, used here, is to always expand $ac_aux_dir to an # absolute PATH. The drawback is that using absolute paths prevent a # configured tree to be moved without reconfiguration. AC_DEFUN([AM_AUX_DIR_EXPAND], [dnl Rely on autoconf to set up CDPATH properly. AC_PREREQ([2.50])dnl # expand $ac_aux_dir to an absolute path am_aux_dir=`cd $ac_aux_dir && pwd` ]) # AM_CONDITIONAL -*- Autoconf -*- # Copyright (C) 1997-2013 Free Software Foundation, Inc. 
# # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # AM_CONDITIONAL(NAME, SHELL-CONDITION) # ------------------------------------- # Define a conditional. AC_DEFUN([AM_CONDITIONAL], [AC_PREREQ([2.52])dnl m4_if([$1], [TRUE], [AC_FATAL([$0: invalid condition: $1])], [$1], [FALSE], [AC_FATAL([$0: invalid condition: $1])])dnl AC_SUBST([$1_TRUE])dnl AC_SUBST([$1_FALSE])dnl _AM_SUBST_NOTMAKE([$1_TRUE])dnl _AM_SUBST_NOTMAKE([$1_FALSE])dnl m4_define([_AM_COND_VALUE_$1], [$2])dnl if $2; then $1_TRUE= $1_FALSE='#' else $1_TRUE='#' $1_FALSE= fi AC_CONFIG_COMMANDS_PRE( [if test -z "${$1_TRUE}" && test -z "${$1_FALSE}"; then AC_MSG_ERROR([[conditional "$1" was never defined. Usually this means the macro was only invoked conditionally.]]) fi])]) # Do all the work for Automake. -*- Autoconf -*- # Copyright (C) 1996-2013 Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # This macro actually does too much. Some checks are only needed if # your package does certain things. But this isn't really a big deal. # AM_INIT_AUTOMAKE(PACKAGE, VERSION, [NO-DEFINE]) # AM_INIT_AUTOMAKE([OPTIONS]) # ----------------------------------------------- # The call with PACKAGE and VERSION arguments is the old style # call (pre autoconf-2.50), which is being phased out. PACKAGE # and VERSION should now be passed to AC_INIT and removed from # the call to AM_INIT_AUTOMAKE. # We support both call styles for the transition. After # the next Automake release, Autoconf can make the AC_INIT # arguments mandatory, and then we can depend on a new Autoconf # release and drop the old call support. AC_DEFUN([AM_INIT_AUTOMAKE], [AC_PREREQ([2.65])dnl dnl Autoconf wants to disallow AM_ names. We explicitly allow dnl the ones we care about. m4_pattern_allow([^AM_[A-Z]+FLAGS$])dnl AC_REQUIRE([AM_SET_CURRENT_AUTOMAKE_VERSION])dnl AC_REQUIRE([AC_PROG_INSTALL])dnl if test "`cd $srcdir && pwd`" != "`pwd`"; then # Use -I$(srcdir) only when $(srcdir) != ., so that make's output # is not polluted with repeated "-I." AC_SUBST([am__isrc], [' -I$(srcdir)'])_AM_SUBST_NOTMAKE([am__isrc])dnl # test to see if srcdir already configured if test -f $srcdir/config.status; then AC_MSG_ERROR([source directory already configured; run "make distclean" there first]) fi fi # test whether we have cygpath if test -z "$CYGPATH_W"; then if (cygpath --version) >/dev/null 2>/dev/null; then CYGPATH_W='cygpath -w' else CYGPATH_W=echo fi fi AC_SUBST([CYGPATH_W]) # Define the identity of the package. dnl Distinguish between old-style and new-style calls. m4_ifval([$2], [AC_DIAGNOSE([obsolete], [$0: two- and three-arguments forms are deprecated.]) m4_ifval([$3], [_AM_SET_OPTION([no-define])])dnl AC_SUBST([PACKAGE], [$1])dnl AC_SUBST([VERSION], [$2])], [_AM_SET_OPTIONS([$1])dnl dnl Diagnose old-style AC_INIT with new-style AM_AUTOMAKE_INIT. 
m4_if( m4_ifdef([AC_PACKAGE_NAME], [ok]):m4_ifdef([AC_PACKAGE_VERSION], [ok]), [ok:ok],, [m4_fatal([AC_INIT should be called with package and version arguments])])dnl AC_SUBST([PACKAGE], ['AC_PACKAGE_TARNAME'])dnl AC_SUBST([VERSION], ['AC_PACKAGE_VERSION'])])dnl _AM_IF_OPTION([no-define],, [AC_DEFINE_UNQUOTED([PACKAGE], ["$PACKAGE"], [Name of package]) AC_DEFINE_UNQUOTED([VERSION], ["$VERSION"], [Version number of package])])dnl # Some tools Automake needs. AC_REQUIRE([AM_SANITY_CHECK])dnl AC_REQUIRE([AC_ARG_PROGRAM])dnl AM_MISSING_PROG([ACLOCAL], [aclocal-${am__api_version}]) AM_MISSING_PROG([AUTOCONF], [autoconf]) AM_MISSING_PROG([AUTOMAKE], [automake-${am__api_version}]) AM_MISSING_PROG([AUTOHEADER], [autoheader]) AM_MISSING_PROG([MAKEINFO], [makeinfo]) AC_REQUIRE([AM_PROG_INSTALL_SH])dnl AC_REQUIRE([AM_PROG_INSTALL_STRIP])dnl AC_REQUIRE([AC_PROG_MKDIR_P])dnl # For better backward compatibility. To be removed once Automake 1.9.x # dies out for good. For more background, see: # # AC_SUBST([mkdir_p], ['$(MKDIR_P)']) # We need awk for the "check" target. The system "awk" is bad on # some platforms. AC_REQUIRE([AC_PROG_AWK])dnl AC_REQUIRE([AC_PROG_MAKE_SET])dnl AC_REQUIRE([AM_SET_LEADING_DOT])dnl _AM_IF_OPTION([tar-ustar], [_AM_PROG_TAR([ustar])], [_AM_IF_OPTION([tar-pax], [_AM_PROG_TAR([pax])], [_AM_PROG_TAR([v7])])]) _AM_IF_OPTION([no-dependencies],, [AC_PROVIDE_IFELSE([AC_PROG_CC], [_AM_DEPENDENCIES([CC])], [m4_define([AC_PROG_CC], m4_defn([AC_PROG_CC])[_AM_DEPENDENCIES([CC])])])dnl AC_PROVIDE_IFELSE([AC_PROG_CXX], [_AM_DEPENDENCIES([CXX])], [m4_define([AC_PROG_CXX], m4_defn([AC_PROG_CXX])[_AM_DEPENDENCIES([CXX])])])dnl AC_PROVIDE_IFELSE([AC_PROG_OBJC], [_AM_DEPENDENCIES([OBJC])], [m4_define([AC_PROG_OBJC], m4_defn([AC_PROG_OBJC])[_AM_DEPENDENCIES([OBJC])])])dnl AC_PROVIDE_IFELSE([AC_PROG_OBJCXX], [_AM_DEPENDENCIES([OBJCXX])], [m4_define([AC_PROG_OBJCXX], m4_defn([AC_PROG_OBJCXX])[_AM_DEPENDENCIES([OBJCXX])])])dnl ]) AC_REQUIRE([AM_SILENT_RULES])dnl dnl The testsuite driver may need to know about EXEEXT, so add the dnl 'am__EXEEXT' conditional if _AM_COMPILER_EXEEXT was seen. This dnl macro is hooked onto _AC_COMPILER_EXEEXT early, see below. AC_CONFIG_COMMANDS_PRE(dnl [m4_provide_if([_AM_COMPILER_EXEEXT], [AM_CONDITIONAL([am__EXEEXT], [test -n "$EXEEXT"])])])dnl ]) dnl Hook into '_AC_COMPILER_EXEEXT' early to learn its expansion. Do not dnl add the conditional right here, as _AC_COMPILER_EXEEXT may be further dnl mangled by Autoconf and run in a shell conditional statement. m4_define([_AC_COMPILER_EXEEXT], m4_defn([_AC_COMPILER_EXEEXT])[m4_provide([_AM_COMPILER_EXEEXT])]) # When config.status generates a header, we must update the stamp-h file. # This file resides in the same directory as the config header # that is generated. The stamp files are numbered to have different names. # Autoconf calls _AC_AM_CONFIG_HEADER_HOOK (when defined) in the # loop where config.status creates the headers, so we can generate # our stamp files there. AC_DEFUN([_AC_AM_CONFIG_HEADER_HOOK], [# Compute $1's index in $config_headers. _am_arg=$1 _am_stamp_count=1 for _am_header in $config_headers :; do case $_am_header in $_am_arg | $_am_arg:* ) break ;; * ) _am_stamp_count=`expr $_am_stamp_count + 1` ;; esac done echo "timestamp for $_am_arg" >`AS_DIRNAME(["$_am_arg"])`/stamp-h[]$_am_stamp_count]) # Copyright (C) 2001-2013 Free Software Foundation, Inc. 
# # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # AM_PROG_INSTALL_SH # ------------------ # Define $install_sh. AC_DEFUN([AM_PROG_INSTALL_SH], [AC_REQUIRE([AM_AUX_DIR_EXPAND])dnl if test x"${install_sh}" != xset; then case $am_aux_dir in *\ * | *\ *) install_sh="\${SHELL} '$am_aux_dir/install-sh'" ;; *) install_sh="\${SHELL} $am_aux_dir/install-sh" esac fi AC_SUBST([install_sh])]) # Copyright (C) 2003-2013 Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # Check whether the underlying file-system supports filenames # with a leading dot. For instance MS-DOS doesn't. AC_DEFUN([AM_SET_LEADING_DOT], [rm -rf .tst 2>/dev/null mkdir .tst 2>/dev/null if test -d .tst; then am__leading_dot=. else am__leading_dot=_ fi rmdir .tst 2>/dev/null AC_SUBST([am__leading_dot])]) # Fake the existence of programs that GNU maintainers use. -*- Autoconf -*- # Copyright (C) 1997-2013 Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # AM_MISSING_PROG(NAME, PROGRAM) # ------------------------------ AC_DEFUN([AM_MISSING_PROG], [AC_REQUIRE([AM_MISSING_HAS_RUN]) $1=${$1-"${am_missing_run}$2"} AC_SUBST($1)]) # AM_MISSING_HAS_RUN # ------------------ # Define MISSING if not defined so far and test if it is modern enough. # If it is, set am_missing_run to use it, otherwise, to nothing. AC_DEFUN([AM_MISSING_HAS_RUN], [AC_REQUIRE([AM_AUX_DIR_EXPAND])dnl AC_REQUIRE_AUX_FILE([missing])dnl if test x"${MISSING+set}" != xset; then case $am_aux_dir in *\ * | *\ *) MISSING="\${SHELL} \"$am_aux_dir/missing\"" ;; *) MISSING="\${SHELL} $am_aux_dir/missing" ;; esac fi # Use eval to expand $SHELL if eval "$MISSING --is-lightweight"; then am_missing_run="$MISSING " else am_missing_run= AC_MSG_WARN(['missing' script is too old or missing]) fi ]) # Helper functions for option handling. -*- Autoconf -*- # Copyright (C) 2001-2013 Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # _AM_MANGLE_OPTION(NAME) # ----------------------- AC_DEFUN([_AM_MANGLE_OPTION], [[_AM_OPTION_]m4_bpatsubst($1, [[^a-zA-Z0-9_]], [_])]) # _AM_SET_OPTION(NAME) # -------------------- # Set option NAME. Presently that only means defining a flag for this option. AC_DEFUN([_AM_SET_OPTION], [m4_define(_AM_MANGLE_OPTION([$1]), [1])]) # _AM_SET_OPTIONS(OPTIONS) # ------------------------ # OPTIONS is a space-separated list of Automake options. AC_DEFUN([_AM_SET_OPTIONS], [m4_foreach_w([_AM_Option], [$1], [_AM_SET_OPTION(_AM_Option)])]) # _AM_IF_OPTION(OPTION, IF-SET, [IF-NOT-SET]) # ------------------------------------------- # Execute IF-SET if OPTION is set, IF-NOT-SET otherwise. AC_DEFUN([_AM_IF_OPTION], [m4_ifset(_AM_MANGLE_OPTION([$1]), [$2], [$3])]) # Copyright (C) 2001-2013 Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. 
# AM_RUN_LOG(COMMAND) # ------------------- # Run COMMAND, save the exit status in ac_status, and log it. # (This has been adapted from Autoconf's _AC_RUN_LOG macro.) AC_DEFUN([AM_RUN_LOG], [{ echo "$as_me:$LINENO: $1" >&AS_MESSAGE_LOG_FD ($1) >&AS_MESSAGE_LOG_FD 2>&AS_MESSAGE_LOG_FD ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&AS_MESSAGE_LOG_FD (exit $ac_status); }]) # Check to make sure that the build environment is sane. -*- Autoconf -*- # Copyright (C) 1996-2013 Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # AM_SANITY_CHECK # --------------- AC_DEFUN([AM_SANITY_CHECK], [AC_MSG_CHECKING([whether build environment is sane]) # Reject unsafe characters in $srcdir or the absolute working directory # name. Accept space and tab only in the latter. am_lf=' ' case `pwd` in *[[\\\"\#\$\&\'\`$am_lf]]*) AC_MSG_ERROR([unsafe absolute working directory name]);; esac case $srcdir in *[[\\\"\#\$\&\'\`$am_lf\ \ ]]*) AC_MSG_ERROR([unsafe srcdir value: '$srcdir']);; esac # Do 'set' in a subshell so we don't clobber the current shell's # arguments. Must try -L first in case configure is actually a # symlink; some systems play weird games with the mod time of symlinks # (eg FreeBSD returns the mod time of the symlink's containing # directory). if ( am_has_slept=no for am_try in 1 2; do echo "timestamp, slept: $am_has_slept" > conftest.file set X `ls -Lt "$srcdir/configure" conftest.file 2> /dev/null` if test "$[*]" = "X"; then # -L didn't work. set X `ls -t "$srcdir/configure" conftest.file` fi if test "$[*]" != "X $srcdir/configure conftest.file" \ && test "$[*]" != "X conftest.file $srcdir/configure"; then # If neither matched, then we have a broken ls. This can happen # if, for instance, CONFIG_SHELL is bash and it inherits a # broken ls alias from the environment. This has actually # happened. Such a system could not be considered "sane". AC_MSG_ERROR([ls -t appears to fail. Make sure there is not a broken alias in your environment]) fi if test "$[2]" = conftest.file || test $am_try -eq 2; then break fi # Just in case. sleep 1 am_has_slept=yes done test "$[2]" = conftest.file ) then # Ok. : else AC_MSG_ERROR([newly created file is older than distributed files! Check your system clock]) fi AC_MSG_RESULT([yes]) # If we didn't sleep, we still need to ensure time stamps of config.status and # generated files are strictly newer. am_sleep_pid= if grep 'slept: no' conftest.file >/dev/null 2>&1; then ( sleep 1 ) & am_sleep_pid=$! fi AC_CONFIG_COMMANDS_PRE( [AC_MSG_CHECKING([that generated files are newer than configure]) if test -n "$am_sleep_pid"; then # Hide warnings about reused PIDs. wait $am_sleep_pid 2>/dev/null fi AC_MSG_RESULT([done])]) rm -f conftest.file ]) # Copyright (C) 2009-2013 Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # AM_SILENT_RULES([DEFAULT]) # -------------------------- # Enable less verbose build rules; with the default set to DEFAULT # ("yes" being less verbose, "no" or empty being verbose). 
AC_DEFUN([AM_SILENT_RULES], [AC_ARG_ENABLE([silent-rules], [dnl AS_HELP_STRING( [--enable-silent-rules], [less verbose build output (undo: "make V=1")]) AS_HELP_STRING( [--disable-silent-rules], [verbose build output (undo: "make V=0")])dnl ]) case $enable_silent_rules in @%:@ ((( yes) AM_DEFAULT_VERBOSITY=0;; no) AM_DEFAULT_VERBOSITY=1;; *) AM_DEFAULT_VERBOSITY=m4_if([$1], [yes], [0], [1]);; esac dnl dnl A few 'make' implementations (e.g., NonStop OS and NextStep) dnl do not support nested variable expansions. dnl See automake bug#9928 and bug#10237. am_make=${MAKE-make} AC_CACHE_CHECK([whether $am_make supports nested variables], [am_cv_make_support_nested_variables], [if AS_ECHO([['TRUE=$(BAR$(V)) BAR0=false BAR1=true V=1 am__doit: @$(TRUE) .PHONY: am__doit']]) | $am_make -f - >/dev/null 2>&1; then am_cv_make_support_nested_variables=yes else am_cv_make_support_nested_variables=no fi]) if test $am_cv_make_support_nested_variables = yes; then dnl Using '$V' instead of '$(V)' breaks IRIX make. AM_V='$(V)' AM_DEFAULT_V='$(AM_DEFAULT_VERBOSITY)' else AM_V=$AM_DEFAULT_VERBOSITY AM_DEFAULT_V=$AM_DEFAULT_VERBOSITY fi AC_SUBST([AM_V])dnl AM_SUBST_NOTMAKE([AM_V])dnl AC_SUBST([AM_DEFAULT_V])dnl AM_SUBST_NOTMAKE([AM_DEFAULT_V])dnl AC_SUBST([AM_DEFAULT_VERBOSITY])dnl AM_BACKSLASH='\' AC_SUBST([AM_BACKSLASH])dnl _AM_SUBST_NOTMAKE([AM_BACKSLASH])dnl ]) # Copyright (C) 2001-2013 Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # AM_PROG_INSTALL_STRIP # --------------------- # One issue with vendor 'install' (even GNU) is that you can't # specify the program used to strip binaries. This is especially # annoying in cross-compiling environments, where the build's strip # is unlikely to handle the host's binaries. # Fortunately install-sh will honor a STRIPPROG variable, so we # always use install-sh in "make install-strip", and initialize # STRIPPROG with the value of the STRIP variable (set by the user). AC_DEFUN([AM_PROG_INSTALL_STRIP], [AC_REQUIRE([AM_PROG_INSTALL_SH])dnl # Installed binaries are usually stripped using 'strip' when the user # run "make install-strip". However 'strip' might not be the right # tool to use in cross-compilation environments, therefore Automake # will honor the 'STRIP' environment variable to overrule this program. dnl Don't test for $cross_compiling = yes, because it might be 'maybe'. if test "$cross_compiling" != no; then AC_CHECK_TOOL([STRIP], [strip], :) fi INSTALL_STRIP_PROGRAM="\$(install_sh) -c -s" AC_SUBST([INSTALL_STRIP_PROGRAM])]) # Copyright (C) 2006-2013 Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # _AM_SUBST_NOTMAKE(VARIABLE) # --------------------------- # Prevent Automake from outputting VARIABLE = @VARIABLE@ in Makefile.in. # This macro is traced by Automake. AC_DEFUN([_AM_SUBST_NOTMAKE]) # AM_SUBST_NOTMAKE(VARIABLE) # -------------------------- # Public sister of _AM_SUBST_NOTMAKE. AC_DEFUN([AM_SUBST_NOTMAKE], [_AM_SUBST_NOTMAKE($@)]) # Check how to create a tarball. -*- Autoconf -*- # Copyright (C) 2004-2013 Free Software Foundation, Inc. 
# # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # _AM_PROG_TAR(FORMAT) # -------------------- # Check how to create a tarball in format FORMAT. # FORMAT should be one of 'v7', 'ustar', or 'pax'. # # Substitute a variable $(am__tar) that is a command # writing to stdout a FORMAT-tarball containing the directory # $tardir. # tardir=directory && $(am__tar) > result.tar # # Substitute a variable $(am__untar) that extract such # a tarball read from stdin. # $(am__untar) < result.tar # AC_DEFUN([_AM_PROG_TAR], [# Always define AMTAR for backward compatibility. Yes, it's still used # in the wild :-( We should find a proper way to deprecate it ... AC_SUBST([AMTAR], ['$${TAR-tar}']) # We'll loop over all known methods to create a tar archive until one works. _am_tools='gnutar m4_if([$1], [ustar], [plaintar]) pax cpio none' m4_if([$1], [v7], [am__tar='$${TAR-tar} chof - "$$tardir"' am__untar='$${TAR-tar} xf -'], [m4_case([$1], [ustar], [# The POSIX 1988 'ustar' format is defined with fixed-size fields. # There is notably a 21 bits limit for the UID and the GID. In fact, # the 'pax' utility can hang on bigger UID/GID (see automake bug#8343 # and bug#13588). am_max_uid=2097151 # 2^21 - 1 am_max_gid=$am_max_uid # The $UID and $GID variables are not portable, so we need to resort # to the POSIX-mandated id(1) utility. Errors in the 'id' calls # below are definitely unexpected, so allow the users to see them # (that is, avoid stderr redirection). am_uid=`id -u || echo unknown` am_gid=`id -g || echo unknown` AC_MSG_CHECKING([whether UID '$am_uid' is supported by ustar format]) if test $am_uid -le $am_max_uid; then AC_MSG_RESULT([yes]) else AC_MSG_RESULT([no]) _am_tools=none fi AC_MSG_CHECKING([whether GID '$am_gid' is supported by ustar format]) if test $am_gid -le $am_max_gid; then AC_MSG_RESULT([yes]) else AC_MSG_RESULT([no]) _am_tools=none fi], [pax], [], [m4_fatal([Unknown tar format])]) AC_MSG_CHECKING([how to create a $1 tar archive]) # Go ahead even if we have the value already cached. We do so because we # need to set the values for the 'am__tar' and 'am__untar' variables. _am_tools=${am_cv_prog_tar_$1-$_am_tools} for _am_tool in $_am_tools; do case $_am_tool in gnutar) for _am_tar in tar gnutar gtar; do AM_RUN_LOG([$_am_tar --version]) && break done am__tar="$_am_tar --format=m4_if([$1], [pax], [posix], [$1]) -chf - "'"$$tardir"' am__tar_="$_am_tar --format=m4_if([$1], [pax], [posix], [$1]) -chf - "'"$tardir"' am__untar="$_am_tar -xf -" ;; plaintar) # Must skip GNU tar: if it does not support --format= it doesn't create # ustar tarball either. (tar --version) >/dev/null 2>&1 && continue am__tar='tar chf - "$$tardir"' am__tar_='tar chf - "$tardir"' am__untar='tar xf -' ;; pax) am__tar='pax -L -x $1 -w "$$tardir"' am__tar_='pax -L -x $1 -w "$tardir"' am__untar='pax -r' ;; cpio) am__tar='find "$$tardir" -print | cpio -o -H $1 -L' am__tar_='find "$tardir" -print | cpio -o -H $1 -L' am__untar='cpio -i -H $1 -d' ;; none) am__tar=false am__tar_=false am__untar=false ;; esac # If the value was cached, stop now. We just wanted to have am__tar # and am__untar set. test -n "${am_cv_prog_tar_$1}" && break # tar/untar a dummy directory, and stop if the command works. 
rm -rf conftest.dir mkdir conftest.dir echo GrepMe > conftest.dir/file AM_RUN_LOG([tardir=conftest.dir && eval $am__tar_ >conftest.tar]) rm -rf conftest.dir if test -s conftest.tar; then AM_RUN_LOG([$am__untar /dev/null 2>&1 && break fi done rm -rf conftest.dir AC_CACHE_VAL([am_cv_prog_tar_$1], [am_cv_prog_tar_$1=$_am_tool]) AC_MSG_RESULT([$am_cv_prog_tar_$1])]) AC_SUBST([am__tar]) AC_SUBST([am__untar]) ]) # _AM_PROG_TAR globus_gram_job_manager_scripts-7.3/PaxHeaders.20921/Makefile.am0000644000000000000000000000013213765226605023021 xustar000000000000000030 mtime=1607806341.384897731 30 atime=1607806719.440897731 30 ctime=1607807072.908897731 globus_gram_job_manager_scripts-7.3/Makefile.am0000664000372000037200000000306513765226605022735 0ustar00travistravis00000000000000globusperldir=$(perlmoduledir)/Globus/GRAM perldocdir=$(docdir)/perl/Globus/GRAM MAN_DIR=${mandir}/man3 libexec_SCRIPTS = globus-job-manager-script.pl sbin_SCRIPTS = globus-gatekeeper-admin globusperl_DATA = \ JobDescription.pm \ JobManager.pm \ StdioMerger.pm perldoc_DATA = JobManager.html JobDescription.html noinst_DATA = JobManager.3pm JobDescription.3pm doc_DATA = GLOBUS_LICENSE man_MANS = globus-gatekeeper-admin.8 BUILT_SOURCES = JobDescription.pm # Files which we want to put in the source package EXTRA_DIST = \ dirt.sh \ $(globusperl_DATA) \ $(perldoc_DATA) \ $(noinst_DATA) \ globus-gatekeeper-admin.txt \ globus-gatekeeper-admin.8 \ $(doc_DATA) SUFFIXES = .pm .html .3pm .pm.html: pod2html --noindex $< > $@ .pm.3pm: pod2man --section=3pm $< > $@ install-data-local: $(noinst_DATA) $(mkinstalldirs) $(DESTDIR)$(MAN_DIR) for manpage in $?; do \ $(INSTALL_DATA) $$manpage $(DESTDIR)$(MAN_DIR)/Globus::GRAM::`basename $$manpage` || exit 1; \ done all-local: $(globusperl_DATA) $(MKDIR_P) Globus/GRAM if [ -f JobDescription.pm ] ; then \ $(INSTALL) JobDescription.pm Globus/GRAM ; \ else \ $(INSTALL) $(srcdir)/JobDescription.pm Globus/GRAM ; \ fi $(INSTALL) $(srcdir)/JobManager.pm Globus/GRAM $(INSTALL) $(srcdir)/StdioMerger.pm Globus/GRAM clean-local: rm -rf Globus MAINTAINERCLEANFILES = $(perldoc_DATA) $(noinst_DATA) JobDescription.pm if BUILD_MANPAGES SUFFIXES += .txt .8 .txt.8: $(A2X) -d manpage -f manpage $< MAINTAINERCLEANFILES += $(man_MANS) endif distuninstallcheck: @: globus_gram_job_manager_scripts-7.3/PaxHeaders.20921/JobDescription.html0000644000000000000000000000013213765230140024556 xustar000000000000000030 mtime=1607807072.800897731 30 atime=1607807072.760897731 30 ctime=1607807072.924897731 globus_gram_job_manager_scripts-7.3/JobDescription.html0000644000372000037200000001277213765230140024475 0ustar00travistravis00000000000000

NAME

Globus::GRAM::JobDescription - GRAM Job Description

Globus::GRAM::DefaultHandlingJobDescription - GRAM Job Description with relative path handling

SYNOPSIS

    use Globus::GRAM::JobDescription;

    $hash = { executable => [ '/bin/echo' ], arguments => [ 'hello' ] };
    $description = new Globus::GRAM::JobDescription($filename);
    $description = new Globus::GRAM::JobDescription($hash);
    $executable = $description->executable();
    $description->add($new_attribute, $new_value);
    $description->save();
    $description->save($filename);
    $description->print_recursive($file_handle);

DESCRIPTION

This object contains the parameters of a job request in a simple object wrapper. The object may be queried to determine the value of any RSL parameter, may be updated with new parameters, and may be saved in the filesystem for later use.
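
For example, a minimal usage sketch (the description file path and the queue attribute below are placeholders for illustration, not values defined by this package):

    use Globus::GRAM::JobDescription;

    # '/var/tmp/gram_job_description' is a hypothetical description file.
    $description = new Globus::GRAM::JobDescription('/var/tmp/gram_job_description');

    # Query an RSL parameter; values are stored internally as arrays.
    @arguments = $description->arguments();

    # Add a parameter ('queue' is an arbitrary example name) and save it.
    $description->add('queue', 'batch');
    $description->save();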

Methods

new Globus::GRAM::JobDescription($filename)

A JobDescription is constructed from a file consisting of a Perl hash of parameter => array mappings. Every value in the Job Description is stored internally as an array, even single literals, similar to the way an RSL tree is parsed in C. An example of such a file is

    $description =
    {
        executable  => [ '/bin/echo' ], 
        arguments   => [ 'hello', 'world' ],
        environment => [
                           [
                               'GLOBUS_GRAM_JOB_CONTACT',
                               'https://globus.org:1234/2345/4332'
                           ]
                       ]
    };

which corresponds to the RSL fragment

    &(executable  = /bin/echo)
     (arguments   = hello world)
     (environment =
         (GLOBUS_GRAM_JOB_CONTACT 'https://globus.org:1234/2345/4332')
     )

When the library_path RSL attribute is specified, this object modifies the environment RSL attribute value to append its value to any system-specific variables.

$description->add('name', $value);

Add a parameter to a job description. The parameter will be normalized internally so that the access methods described below will work with this new parameter. As an example,

    $description->add('new_attribute', $new_value)

will create a new attribute in the JobDescription, which can be accessed by calling the $description->new_attribute() method.

$value = $description->get('name');

Get a parameter from a job description. As an example,

    $description->get('attribute')

will return the appropriate attribute in the JobDescription by name.

$description->save([$filename])

Save the JobDescription, including any added parameters, to the file named by $filename if one is given; otherwise, overwrite the file used to construct the object.
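
A brief sketch of both forms (the copy path below is only a placeholder):

    $description->save();                            # overwrite the original file
    $description->save('/var/tmp/job_description');  # write to a caller-chosen file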

$description->print_recursive($file_handle)

Write the value of the job description object to the file handle specified in the argument list.
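
For example, a sketch that writes the description to a file of the caller's choosing (IO::File is used here only for illustration; any Perl file handle should work):

    use IO::File;

    $file_handle = new IO::File('/var/tmp/description.dump', 'w');
    $description->print_recursive($file_handle);
    $file_handle->close();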

$description->parameter()

Any parameter defined in the JobDescription can be accessed by calling the method named after that parameter. The method names are created automatically when the JobDescription is constructed, and may be invoked with arbitrary SillyCaps or underscores. That is, the parameter gram_myjob may be accessed by the GramMyJob, grammyjob, or gram_my_job method names (and others).
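
For instance, using the gram_myjob parameter mentioned above, the following calls all access the same value (a sketch only):

    $value = $description->gram_myjob();
    $value = $description->GramMyJob();      # same parameter
    $value = $description->gram_my_job();    # same parameter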

If the attribute does not exist in this object, then undef will be returned.

In a list context, this returns the list of values associated with an attribute.

In a scalar context, if the attribute's value consists of a single literal, then that literal will be returned; otherwise, undef will be returned.

For example, from a JobDescription called $d constructed from a description file containing

    {
        executable => [ '/bin/echo' ],
        arguments  => [ 'hello', 'world' ]
    }

The following will hold:

    $executable = $d->executable()    # '/bin/echo'
    $arguments = $d->arguments()      # undef
    @executable = $d->executable()    # ('/bin/echo')
    @arguments = $d->arguments()      # ('hello', 'world')
    $not_present = $d->not_present()  # undef
    @not_present = $d->not_present()  # ()

To test for existence of a value:

    @not_present = $d->not_present()
    print "Not defined\n" if(!defined($not_present[0]));
globus_gram_job_manager_scripts-7.3/PaxHeaders.20921/GLOBUS_LICENSE0000644000000000000000000000013213765226605023045 xustar000000000000000030 mtime=1607806341.380897731 30 atime=1607806341.552897731 30 ctime=1607807072.928897731 globus_gram_job_manager_scripts-7.3/GLOBUS_LICENSE0000664000372000037200000002367613765226605022773 0ustar00travistravis00000000000000 Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 1. Definitions. "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. 
Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. Submission of Contributions. 
Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. 
END OF TERMS AND CONDITIONS globus_gram_job_manager_scripts-7.3/PaxHeaders.20921/configure.ac0000644000000000000000000000013213765226605023253 xustar000000000000000030 mtime=1607806341.384897731 30 atime=1607806472.728897731 30 ctime=1607807072.912897731 globus_gram_job_manager_scripts-7.3/configure.ac0000664000372000037200000000226613765226605023171 0ustar00travistravis00000000000000AC_PREREQ([2.60]) AC_INIT([globus_gram_job_manager_scripts],[7.3],[https://github.com/gridcf/gct/issues]) AC_SUBST([MAJOR_VERSION], [${PACKAGE_VERSION%%.*}]) AC_SUBST([MINOR_VERSION], [${PACKAGE_VERSION##*.}]) AC_SUBST([AGE_VERSION], [3]) AC_SUBST([PACKAGE_DEPS], [""]) AC_CONFIG_AUX_DIR([build-aux]) AM_INIT_AUTOMAKE([1.11 foreign parallel-tests tar-pax]) m4_include([dirt.sh]) AC_SUBST(DIRT_TIMESTAMP) AC_SUBST(DIRT_BRANCH_ID) AC_PROG_MKDIR_P AC_ARG_WITH([perlmoduledir], AC_HELP_STRING([--with-perlmoduledir=DIR], [perl module directory [[PREFIX/lib/perl]]]), [ if test x$withval = "xno" -o x$withval = "xyes" ; then AC_MSG_ERROR([--with-perlmoduledir requires an argument]) fi perlmoduledir=$withval ], [ perlmoduledir='${libdir}/perl' ]) AC_SUBST(perlmoduledir) AC_PATH_PROGS([A2X], [a2x a2x.py]) AM_CONDITIONAL(BUILD_MANPAGES, [test "x$A2X" != x]) AC_CONFIG_FILES( globus-gram-job-manager-scripts-uninstalled.pc globus-gram-job-manager-scripts.pc Makefile) AC_CONFIG_FILES([globus-gatekeeper-admin], [chmod a+x globus-gatekeeper-admin]) AC_CONFIG_FILES([globus-job-manager-script.pl], [chmod a+x globus-job-manager-script.pl]) AC_OUTPUT globus_gram_job_manager_scripts-7.3/PaxHeaders.20921/Makefile.in0000644000000000000000000000013213765227401023025 xustar000000000000000030 mtime=1607806721.268897731 30 atime=1607806957.712897731 30 ctime=1607807072.908897731 globus_gram_job_manager_scripts-7.3/Makefile.in0000644000372000037200000006666013765227401022751 0ustar00travistravis00000000000000# Makefile.in generated by automake 1.13.4 from Makefile.am. # @configure_input@ # Copyright (C) 1994-2013 Free Software Foundation, Inc. # This Makefile.in is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY, to the extent permitted by law; without # even the implied warranty of MERCHANTABILITY or FITNESS FOR A # PARTICULAR PURPOSE. @SET_MAKE@ VPATH = @srcdir@ am__is_gnu_make = test -n '$(MAKEFILE_LIST)' && test -n '$(MAKELEVEL)' am__make_running_with_option = \ case $${target_option-} in \ ?) 
;; \ *) echo "am__make_running_with_option: internal error: invalid" \ "target option '$${target_option-}' specified" >&2; \ exit 1;; \ esac; \ has_opt=no; \ sane_makeflags=$$MAKEFLAGS; \ if $(am__is_gnu_make); then \ sane_makeflags=$$MFLAGS; \ else \ case $$MAKEFLAGS in \ *\\[\ \ ]*) \ bs=\\; \ sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \ | sed "s/$$bs$$bs[$$bs $$bs ]*//g"`;; \ esac; \ fi; \ skip_next=no; \ strip_trailopt () \ { \ flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \ }; \ for flg in $$sane_makeflags; do \ test $$skip_next = yes && { skip_next=no; continue; }; \ case $$flg in \ *=*|--*) continue;; \ -*I) strip_trailopt 'I'; skip_next=yes;; \ -*I?*) strip_trailopt 'I';; \ -*O) strip_trailopt 'O'; skip_next=yes;; \ -*O?*) strip_trailopt 'O';; \ -*l) strip_trailopt 'l'; skip_next=yes;; \ -*l?*) strip_trailopt 'l';; \ -[dEDm]) skip_next=yes;; \ -[JT]) skip_next=yes;; \ esac; \ case $$flg in \ *$$target_option*) has_opt=yes; break;; \ esac; \ done; \ test $$has_opt = yes am__make_dryrun = (target_option=n; $(am__make_running_with_option)) am__make_keepgoing = (target_option=k; $(am__make_running_with_option)) pkgdatadir = $(datadir)/@PACKAGE@ pkgincludedir = $(includedir)/@PACKAGE@ pkglibdir = $(libdir)/@PACKAGE@ pkglibexecdir = $(libexecdir)/@PACKAGE@ am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd install_sh_DATA = $(install_sh) -c -m 644 install_sh_PROGRAM = $(install_sh) -c install_sh_SCRIPT = $(install_sh) -c INSTALL_HEADER = $(INSTALL_DATA) transform = $(program_transform_name) NORMAL_INSTALL = : PRE_INSTALL = : POST_INSTALL = : NORMAL_UNINSTALL = : PRE_UNINSTALL = : POST_UNINSTALL = : @BUILD_MANPAGES_TRUE@am__append_1 = .txt .8 @BUILD_MANPAGES_TRUE@am__append_2 = $(man_MANS) subdir = . DIST_COMMON = $(srcdir)/Makefile.in $(srcdir)/Makefile.am \ $(top_srcdir)/configure $(am__configure_deps) \ $(srcdir)/globus-gram-job-manager-scripts-uninstalled.pc.in \ $(srcdir)/globus-gram-job-manager-scripts.pc.in \ $(srcdir)/globus-gatekeeper-admin.in \ $(srcdir)/globus-job-manager-script.pl.in build-aux/install-sh \ build-aux/missing $(top_srcdir)/build-aux/install-sh \ $(top_srcdir)/build-aux/missing ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 am__aclocal_m4_deps = $(top_srcdir)/dirt.sh $(top_srcdir)/configure.ac am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ $(ACLOCAL_M4) am__CONFIG_DISTCLEAN_FILES = config.status config.cache config.log \ configure.lineno config.status.lineno mkinstalldirs = $(install_sh) -d CONFIG_CLEAN_FILES = globus-gram-job-manager-scripts-uninstalled.pc \ globus-gram-job-manager-scripts.pc globus-gatekeeper-admin \ globus-job-manager-script.pl CONFIG_CLEAN_VPATH_FILES = am__vpath_adj_setup = srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`; am__vpath_adj = case $$p in \ $(srcdir)/*) f=`echo "$$p" | sed "s|^$$srcdirstrip/||"`;; \ *) f=$$p;; \ esac; am__strip_dir = f=`echo $$p | sed -e 's|^.*/||'`; am__install_max = 40 am__nobase_strip_setup = \ srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*|]/\\\\&/g'` am__nobase_strip = \ for p in $$list; do echo "$$p"; done | sed -e "s|$$srcdirstrip/||" am__nobase_list = $(am__nobase_strip_setup); \ for p in $$list; do echo "$$p $$p"; done | \ sed "s| $$srcdirstrip/| |;"' / .*\//!s/ .*/ ./; s,\( .*\)/[^/]*$$,\1,' | \ $(AWK) 'BEGIN { files["."] = "" } { files[$$2] = files[$$2] " " $$1; \ if (++n[$$2] == $(am__install_max)) \ { print $$2, files[$$2]; n[$$2] = 0; files[$$2] = "" } } \ END { for (dir in files) print dir, files[dir] }' am__base_list = \ sed '$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;s/\n/ 
/g' | \ sed '$$!N;$$!N;$$!N;$$!N;s/\n/ /g' am__uninstall_files_from_dir = { \ test -z "$$files" \ || { test ! -d "$$dir" && test ! -f "$$dir" && test ! -r "$$dir"; } \ || { echo " ( cd '$$dir' && rm -f" $$files ")"; \ $(am__cd) "$$dir" && rm -f $$files; }; \ } am__installdirs = "$(DESTDIR)$(libexecdir)" "$(DESTDIR)$(sbindir)" \ "$(DESTDIR)$(man8dir)" "$(DESTDIR)$(docdir)" \ "$(DESTDIR)$(globusperldir)" "$(DESTDIR)$(perldocdir)" SCRIPTS = $(libexec_SCRIPTS) $(sbin_SCRIPTS) AM_V_P = $(am__v_P_@AM_V@) am__v_P_ = $(am__v_P_@AM_DEFAULT_V@) am__v_P_0 = false am__v_P_1 = : AM_V_GEN = $(am__v_GEN_@AM_V@) am__v_GEN_ = $(am__v_GEN_@AM_DEFAULT_V@) am__v_GEN_0 = @echo " GEN " $@; am__v_GEN_1 = AM_V_at = $(am__v_at_@AM_V@) am__v_at_ = $(am__v_at_@AM_DEFAULT_V@) am__v_at_0 = @ am__v_at_1 = SOURCES = DIST_SOURCES = am__can_run_installinfo = \ case $$AM_UPDATE_INFO_DIR in \ n|no|NO) false;; \ *) (install-info --version) >/dev/null 2>&1;; \ esac man8dir = $(mandir)/man8 NROFF = nroff MANS = $(man_MANS) DATA = $(doc_DATA) $(globusperl_DATA) $(noinst_DATA) $(perldoc_DATA) am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP) DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) distdir = $(PACKAGE)-$(VERSION) top_distdir = $(distdir) am__remove_distdir = \ if test -d "$(distdir)"; then \ find "$(distdir)" -type d ! -perm -200 -exec chmod u+w {} ';' \ && rm -rf "$(distdir)" \ || { sleep 5 && rm -rf "$(distdir)"; }; \ else :; fi am__post_remove_distdir = $(am__remove_distdir) DIST_ARCHIVES = $(distdir).tar.gz GZIP_ENV = --best DIST_TARGETS = dist-gzip distuninstallcheck_listfiles = find . -type f -print am__distuninstallcheck_listfiles = $(distuninstallcheck_listfiles) \ | sed 's|^\./|$(prefix)/|' | grep -v '$(infodir)/dir$$' distcleancheck_listfiles = find . 
-type f -print A2X = @A2X@ ACLOCAL = @ACLOCAL@ AGE_VERSION = @AGE_VERSION@ AMTAR = @AMTAR@ AM_DEFAULT_VERBOSITY = @AM_DEFAULT_VERBOSITY@ AUTOCONF = @AUTOCONF@ AUTOHEADER = @AUTOHEADER@ AUTOMAKE = @AUTOMAKE@ AWK = @AWK@ CYGPATH_W = @CYGPATH_W@ DEFS = @DEFS@ DIRT_BRANCH_ID = @DIRT_BRANCH_ID@ DIRT_TIMESTAMP = @DIRT_TIMESTAMP@ ECHO_C = @ECHO_C@ ECHO_N = @ECHO_N@ ECHO_T = @ECHO_T@ INSTALL = @INSTALL@ INSTALL_DATA = @INSTALL_DATA@ INSTALL_PROGRAM = @INSTALL_PROGRAM@ INSTALL_SCRIPT = @INSTALL_SCRIPT@ INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ LIBOBJS = @LIBOBJS@ LIBS = @LIBS@ LTLIBOBJS = @LTLIBOBJS@ MAJOR_VERSION = @MAJOR_VERSION@ MAKEINFO = @MAKEINFO@ MINOR_VERSION = @MINOR_VERSION@ MKDIR_P = @MKDIR_P@ PACKAGE = @PACKAGE@ PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ PACKAGE_DEPS = @PACKAGE_DEPS@ PACKAGE_NAME = @PACKAGE_NAME@ PACKAGE_STRING = @PACKAGE_STRING@ PACKAGE_TARNAME = @PACKAGE_TARNAME@ PACKAGE_URL = @PACKAGE_URL@ PACKAGE_VERSION = @PACKAGE_VERSION@ PATH_SEPARATOR = @PATH_SEPARATOR@ SET_MAKE = @SET_MAKE@ SHELL = @SHELL@ STRIP = @STRIP@ VERSION = @VERSION@ abs_builddir = @abs_builddir@ abs_srcdir = @abs_srcdir@ abs_top_builddir = @abs_top_builddir@ abs_top_srcdir = @abs_top_srcdir@ am__leading_dot = @am__leading_dot@ am__tar = @am__tar@ am__untar = @am__untar@ bindir = @bindir@ build_alias = @build_alias@ builddir = @builddir@ datadir = @datadir@ datarootdir = @datarootdir@ docdir = @docdir@ dvidir = @dvidir@ exec_prefix = @exec_prefix@ host_alias = @host_alias@ htmldir = @htmldir@ includedir = @includedir@ infodir = @infodir@ install_sh = @install_sh@ libdir = @libdir@ libexecdir = @libexecdir@ localedir = @localedir@ localstatedir = @localstatedir@ mandir = @mandir@ mkdir_p = @mkdir_p@ oldincludedir = @oldincludedir@ pdfdir = @pdfdir@ perlmoduledir = @perlmoduledir@ prefix = @prefix@ program_transform_name = @program_transform_name@ psdir = @psdir@ sbindir = @sbindir@ sharedstatedir = @sharedstatedir@ srcdir = @srcdir@ sysconfdir = @sysconfdir@ target_alias = @target_alias@ top_build_prefix = @top_build_prefix@ top_builddir = @top_builddir@ top_srcdir = @top_srcdir@ globusperldir = $(perlmoduledir)/Globus/GRAM perldocdir = $(docdir)/perl/Globus/GRAM MAN_DIR = ${mandir}/man3 libexec_SCRIPTS = globus-job-manager-script.pl sbin_SCRIPTS = globus-gatekeeper-admin globusperl_DATA = \ JobDescription.pm \ JobManager.pm \ StdioMerger.pm perldoc_DATA = JobManager.html JobDescription.html noinst_DATA = JobManager.3pm JobDescription.3pm doc_DATA = GLOBUS_LICENSE man_MANS = globus-gatekeeper-admin.8 BUILT_SOURCES = JobDescription.pm # Files which we want to put in the source package EXTRA_DIST = \ dirt.sh \ $(globusperl_DATA) \ $(perldoc_DATA) \ $(noinst_DATA) \ globus-gatekeeper-admin.txt \ globus-gatekeeper-admin.8 \ $(doc_DATA) SUFFIXES = .pm .html .3pm $(am__append_1) MAINTAINERCLEANFILES = $(perldoc_DATA) $(noinst_DATA) \ JobDescription.pm $(am__append_2) all: $(BUILT_SOURCES) $(MAKE) $(AM_MAKEFLAGS) all-am .SUFFIXES: .SUFFIXES: .pm .html .3pm .txt .8 am--refresh: Makefile @: $(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps) @for dep in $?; do \ case '$(am__configure_deps)' in \ *$$dep*) \ echo ' cd $(srcdir) && $(AUTOMAKE) --foreign'; \ $(am__cd) $(srcdir) && $(AUTOMAKE) --foreign \ && exit 0; \ exit 1;; \ esac; \ done; \ echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign Makefile'; \ $(am__cd) $(top_srcdir) && \ $(AUTOMAKE) --foreign Makefile .PRECIOUS: Makefile Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status @case '$?' 
in \ *config.status*) \ echo ' $(SHELL) ./config.status'; \ $(SHELL) ./config.status;; \ *) \ echo ' cd $(top_builddir) && $(SHELL) ./config.status $@ $(am__depfiles_maybe)'; \ cd $(top_builddir) && $(SHELL) ./config.status $@ $(am__depfiles_maybe);; \ esac; $(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) $(SHELL) ./config.status --recheck $(top_srcdir)/configure: $(am__configure_deps) $(am__cd) $(srcdir) && $(AUTOCONF) $(ACLOCAL_M4): $(am__aclocal_m4_deps) $(am__cd) $(srcdir) && $(ACLOCAL) $(ACLOCAL_AMFLAGS) $(am__aclocal_m4_deps): globus-gram-job-manager-scripts-uninstalled.pc: $(top_builddir)/config.status $(srcdir)/globus-gram-job-manager-scripts-uninstalled.pc.in cd $(top_builddir) && $(SHELL) ./config.status $@ globus-gram-job-manager-scripts.pc: $(top_builddir)/config.status $(srcdir)/globus-gram-job-manager-scripts.pc.in cd $(top_builddir) && $(SHELL) ./config.status $@ globus-gatekeeper-admin: $(top_builddir)/config.status $(srcdir)/globus-gatekeeper-admin.in cd $(top_builddir) && $(SHELL) ./config.status $@ globus-job-manager-script.pl: $(top_builddir)/config.status $(srcdir)/globus-job-manager-script.pl.in cd $(top_builddir) && $(SHELL) ./config.status $@ install-libexecSCRIPTS: $(libexec_SCRIPTS) @$(NORMAL_INSTALL) @list='$(libexec_SCRIPTS)'; test -n "$(libexecdir)" || list=; \ if test -n "$$list"; then \ echo " $(MKDIR_P) '$(DESTDIR)$(libexecdir)'"; \ $(MKDIR_P) "$(DESTDIR)$(libexecdir)" || exit 1; \ fi; \ for p in $$list; do \ if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \ if test -f "$$d$$p"; then echo "$$d$$p"; echo "$$p"; else :; fi; \ done | \ sed -e 'p;s,.*/,,;n' \ -e 'h;s|.*|.|' \ -e 'p;x;s,.*/,,;$(transform)' | sed 'N;N;N;s,\n, ,g' | \ $(AWK) 'BEGIN { files["."] = ""; dirs["."] = 1; } \ { d=$$3; if (dirs[d] != 1) { print "d", d; dirs[d] = 1 } \ if ($$2 == $$4) { files[d] = files[d] " " $$1; \ if (++n[d] == $(am__install_max)) { \ print "f", d, files[d]; n[d] = 0; files[d] = "" } } \ else { print "f", d "/" $$4, $$1 } } \ END { for (d in files) print "f", d, files[d] }' | \ while read type dir files; do \ if test "$$dir" = .; then dir=; else dir=/$$dir; fi; \ test -z "$$files" || { \ echo " $(INSTALL_SCRIPT) $$files '$(DESTDIR)$(libexecdir)$$dir'"; \ $(INSTALL_SCRIPT) $$files "$(DESTDIR)$(libexecdir)$$dir" || exit $$?; \ } \ ; done uninstall-libexecSCRIPTS: @$(NORMAL_UNINSTALL) @list='$(libexec_SCRIPTS)'; test -n "$(libexecdir)" || exit 0; \ files=`for p in $$list; do echo "$$p"; done | \ sed -e 's,.*/,,;$(transform)'`; \ dir='$(DESTDIR)$(libexecdir)'; $(am__uninstall_files_from_dir) install-sbinSCRIPTS: $(sbin_SCRIPTS) @$(NORMAL_INSTALL) @list='$(sbin_SCRIPTS)'; test -n "$(sbindir)" || list=; \ if test -n "$$list"; then \ echo " $(MKDIR_P) '$(DESTDIR)$(sbindir)'"; \ $(MKDIR_P) "$(DESTDIR)$(sbindir)" || exit 1; \ fi; \ for p in $$list; do \ if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \ if test -f "$$d$$p"; then echo "$$d$$p"; echo "$$p"; else :; fi; \ done | \ sed -e 'p;s,.*/,,;n' \ -e 'h;s|.*|.|' \ -e 'p;x;s,.*/,,;$(transform)' | sed 'N;N;N;s,\n, ,g' | \ $(AWK) 'BEGIN { files["."] = ""; dirs["."] = 1; } \ { d=$$3; if (dirs[d] != 1) { print "d", d; dirs[d] = 1 } \ if ($$2 == $$4) { files[d] = files[d] " " $$1; \ if (++n[d] == $(am__install_max)) { \ print "f", d, files[d]; n[d] = 0; files[d] = "" } } \ else { print "f", d "/" $$4, $$1 } } \ END { for (d in files) print "f", d, files[d] }' | \ while read type dir files; do \ if test "$$dir" = .; then dir=; else dir=/$$dir; fi; \ test -z "$$files" || { \ echo " 
$(INSTALL_SCRIPT) $$files '$(DESTDIR)$(sbindir)$$dir'"; \ $(INSTALL_SCRIPT) $$files "$(DESTDIR)$(sbindir)$$dir" || exit $$?; \ } \ ; done uninstall-sbinSCRIPTS: @$(NORMAL_UNINSTALL) @list='$(sbin_SCRIPTS)'; test -n "$(sbindir)" || exit 0; \ files=`for p in $$list; do echo "$$p"; done | \ sed -e 's,.*/,,;$(transform)'`; \ dir='$(DESTDIR)$(sbindir)'; $(am__uninstall_files_from_dir) install-man8: $(man_MANS) @$(NORMAL_INSTALL) @list1=''; \ list2='$(man_MANS)'; \ test -n "$(man8dir)" \ && test -n "`echo $$list1$$list2`" \ || exit 0; \ echo " $(MKDIR_P) '$(DESTDIR)$(man8dir)'"; \ $(MKDIR_P) "$(DESTDIR)$(man8dir)" || exit 1; \ { for i in $$list1; do echo "$$i"; done; \ if test -n "$$list2"; then \ for i in $$list2; do echo "$$i"; done \ | sed -n '/\.8[a-z]*$$/p'; \ fi; \ } | while read p; do \ if test -f $$p; then d=; else d="$(srcdir)/"; fi; \ echo "$$d$$p"; echo "$$p"; \ done | \ sed -e 'n;s,.*/,,;p;h;s,.*\.,,;s,^[^8][0-9a-z]*$$,8,;x' \ -e 's,\.[0-9a-z]*$$,,;$(transform);G;s,\n,.,' | \ sed 'N;N;s,\n, ,g' | { \ list=; while read file base inst; do \ if test "$$base" = "$$inst"; then list="$$list $$file"; else \ echo " $(INSTALL_DATA) '$$file' '$(DESTDIR)$(man8dir)/$$inst'"; \ $(INSTALL_DATA) "$$file" "$(DESTDIR)$(man8dir)/$$inst" || exit $$?; \ fi; \ done; \ for i in $$list; do echo "$$i"; done | $(am__base_list) | \ while read files; do \ test -z "$$files" || { \ echo " $(INSTALL_DATA) $$files '$(DESTDIR)$(man8dir)'"; \ $(INSTALL_DATA) $$files "$(DESTDIR)$(man8dir)" || exit $$?; }; \ done; } uninstall-man8: @$(NORMAL_UNINSTALL) @list=''; test -n "$(man8dir)" || exit 0; \ files=`{ for i in $$list; do echo "$$i"; done; \ l2='$(man_MANS)'; for i in $$l2; do echo "$$i"; done | \ sed -n '/\.8[a-z]*$$/p'; \ } | sed -e 's,.*/,,;h;s,.*\.,,;s,^[^8][0-9a-z]*$$,8,;x' \ -e 's,\.[0-9a-z]*$$,,;$(transform);G;s,\n,.,'`; \ dir='$(DESTDIR)$(man8dir)'; $(am__uninstall_files_from_dir) install-docDATA: $(doc_DATA) @$(NORMAL_INSTALL) @list='$(doc_DATA)'; test -n "$(docdir)" || list=; \ if test -n "$$list"; then \ echo " $(MKDIR_P) '$(DESTDIR)$(docdir)'"; \ $(MKDIR_P) "$(DESTDIR)$(docdir)" || exit 1; \ fi; \ for p in $$list; do \ if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \ echo "$$d$$p"; \ done | $(am__base_list) | \ while read files; do \ echo " $(INSTALL_DATA) $$files '$(DESTDIR)$(docdir)'"; \ $(INSTALL_DATA) $$files "$(DESTDIR)$(docdir)" || exit $$?; \ done uninstall-docDATA: @$(NORMAL_UNINSTALL) @list='$(doc_DATA)'; test -n "$(docdir)" || list=; \ files=`for p in $$list; do echo $$p; done | sed -e 's|^.*/||'`; \ dir='$(DESTDIR)$(docdir)'; $(am__uninstall_files_from_dir) install-globusperlDATA: $(globusperl_DATA) @$(NORMAL_INSTALL) @list='$(globusperl_DATA)'; test -n "$(globusperldir)" || list=; \ if test -n "$$list"; then \ echo " $(MKDIR_P) '$(DESTDIR)$(globusperldir)'"; \ $(MKDIR_P) "$(DESTDIR)$(globusperldir)" || exit 1; \ fi; \ for p in $$list; do \ if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \ echo "$$d$$p"; \ done | $(am__base_list) | \ while read files; do \ echo " $(INSTALL_DATA) $$files '$(DESTDIR)$(globusperldir)'"; \ $(INSTALL_DATA) $$files "$(DESTDIR)$(globusperldir)" || exit $$?; \ done uninstall-globusperlDATA: @$(NORMAL_UNINSTALL) @list='$(globusperl_DATA)'; test -n "$(globusperldir)" || list=; \ files=`for p in $$list; do echo $$p; done | sed -e 's|^.*/||'`; \ dir='$(DESTDIR)$(globusperldir)'; $(am__uninstall_files_from_dir) install-perldocDATA: $(perldoc_DATA) @$(NORMAL_INSTALL) @list='$(perldoc_DATA)'; test -n "$(perldocdir)" || list=; \ if test -n "$$list"; then \ echo " 
$(MKDIR_P) '$(DESTDIR)$(perldocdir)'"; \ $(MKDIR_P) "$(DESTDIR)$(perldocdir)" || exit 1; \ fi; \ for p in $$list; do \ if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \ echo "$$d$$p"; \ done | $(am__base_list) | \ while read files; do \ echo " $(INSTALL_DATA) $$files '$(DESTDIR)$(perldocdir)'"; \ $(INSTALL_DATA) $$files "$(DESTDIR)$(perldocdir)" || exit $$?; \ done uninstall-perldocDATA: @$(NORMAL_UNINSTALL) @list='$(perldoc_DATA)'; test -n "$(perldocdir)" || list=; \ files=`for p in $$list; do echo $$p; done | sed -e 's|^.*/||'`; \ dir='$(DESTDIR)$(perldocdir)'; $(am__uninstall_files_from_dir) tags TAGS: ctags CTAGS: cscope cscopelist: distdir: $(DISTFILES) $(am__remove_distdir) test -d "$(distdir)" || mkdir "$(distdir)" @srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ list='$(DISTFILES)'; \ dist_files=`for file in $$list; do echo $$file; done | \ sed -e "s|^$$srcdirstrip/||;t" \ -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \ case $$dist_files in \ */*) $(MKDIR_P) `echo "$$dist_files" | \ sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \ sort -u` ;; \ esac; \ for file in $$dist_files; do \ if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ if test -d $$d/$$file; then \ dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ if test -d "$(distdir)/$$file"; then \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \ else \ test -f "$(distdir)/$$file" \ || cp -p $$d/$$file "$(distdir)/$$file" \ || exit 1; \ fi; \ done -test -n "$(am__skip_mode_fix)" \ || find "$(distdir)" -type d ! -perm -755 \ -exec chmod u+rwx,go+rx {} \; -o \ ! -type d ! -perm -444 -links 1 -exec chmod a+r {} \; -o \ ! -type d ! -perm -400 -exec chmod a+r {} \; -o \ ! -type d ! -perm -444 -exec $(install_sh) -c -m a+r {} {} \; \ || chmod -R a+r "$(distdir)" dist-gzip: distdir tardir=$(distdir) && $(am__tar) | GZIP=$(GZIP_ENV) gzip -c >$(distdir).tar.gz $(am__post_remove_distdir) dist-bzip2: distdir tardir=$(distdir) && $(am__tar) | BZIP2=$${BZIP2--9} bzip2 -c >$(distdir).tar.bz2 $(am__post_remove_distdir) dist-lzip: distdir tardir=$(distdir) && $(am__tar) | lzip -c $${LZIP_OPT--9} >$(distdir).tar.lz $(am__post_remove_distdir) dist-xz: distdir tardir=$(distdir) && $(am__tar) | XZ_OPT=$${XZ_OPT--e} xz -c >$(distdir).tar.xz $(am__post_remove_distdir) dist-tarZ: distdir tardir=$(distdir) && $(am__tar) | compress -c >$(distdir).tar.Z $(am__post_remove_distdir) dist-shar: distdir shar $(distdir) | GZIP=$(GZIP_ENV) gzip -c >$(distdir).shar.gz $(am__post_remove_distdir) dist-zip: distdir -rm -f $(distdir).zip zip -rq $(distdir).zip $(distdir) $(am__post_remove_distdir) dist dist-all: $(MAKE) $(AM_MAKEFLAGS) $(DIST_TARGETS) am__post_remove_distdir='@:' $(am__post_remove_distdir) # This target untars the dist file and tries a VPATH configuration. Then # it guarantees that the distribution is self-contained by making another # tarfile. 
distcheck: dist case '$(DIST_ARCHIVES)' in \ *.tar.gz*) \ GZIP=$(GZIP_ENV) gzip -dc $(distdir).tar.gz | $(am__untar) ;;\ *.tar.bz2*) \ bzip2 -dc $(distdir).tar.bz2 | $(am__untar) ;;\ *.tar.lz*) \ lzip -dc $(distdir).tar.lz | $(am__untar) ;;\ *.tar.xz*) \ xz -dc $(distdir).tar.xz | $(am__untar) ;;\ *.tar.Z*) \ uncompress -c $(distdir).tar.Z | $(am__untar) ;;\ *.shar.gz*) \ GZIP=$(GZIP_ENV) gzip -dc $(distdir).shar.gz | unshar ;;\ *.zip*) \ unzip $(distdir).zip ;;\ esac chmod -R a-w $(distdir) chmod u+w $(distdir) mkdir $(distdir)/_build $(distdir)/_inst chmod a-w $(distdir) test -d $(distdir)/_build || exit 0; \ dc_install_base=`$(am__cd) $(distdir)/_inst && pwd | sed -e 's,^[^:\\/]:[\\/],/,'` \ && dc_destdir="$${TMPDIR-/tmp}/am-dc-$$$$/" \ && am__cwd=`pwd` \ && $(am__cd) $(distdir)/_build \ && ../configure --srcdir=.. --prefix="$$dc_install_base" \ $(AM_DISTCHECK_CONFIGURE_FLAGS) \ $(DISTCHECK_CONFIGURE_FLAGS) \ && $(MAKE) $(AM_MAKEFLAGS) \ && $(MAKE) $(AM_MAKEFLAGS) dvi \ && $(MAKE) $(AM_MAKEFLAGS) check \ && $(MAKE) $(AM_MAKEFLAGS) install \ && $(MAKE) $(AM_MAKEFLAGS) installcheck \ && $(MAKE) $(AM_MAKEFLAGS) uninstall \ && $(MAKE) $(AM_MAKEFLAGS) distuninstallcheck_dir="$$dc_install_base" \ distuninstallcheck \ && chmod -R a-w "$$dc_install_base" \ && ({ \ (cd ../.. && umask 077 && mkdir "$$dc_destdir") \ && $(MAKE) $(AM_MAKEFLAGS) DESTDIR="$$dc_destdir" install \ && $(MAKE) $(AM_MAKEFLAGS) DESTDIR="$$dc_destdir" uninstall \ && $(MAKE) $(AM_MAKEFLAGS) DESTDIR="$$dc_destdir" \ distuninstallcheck_dir="$$dc_destdir" distuninstallcheck; \ } || { rm -rf "$$dc_destdir"; exit 1; }) \ && rm -rf "$$dc_destdir" \ && $(MAKE) $(AM_MAKEFLAGS) dist \ && rm -rf $(DIST_ARCHIVES) \ && $(MAKE) $(AM_MAKEFLAGS) distcleancheck \ && cd "$$am__cwd" \ || exit 1 $(am__post_remove_distdir) @(echo "$(distdir) archives ready for distribution: "; \ list='$(DIST_ARCHIVES)'; for i in $$list; do echo $$i; done) | \ sed -e 1h -e 1s/./=/g -e 1p -e 1x -e '$$p' -e '$$x' distcleancheck: distclean @if test '$(srcdir)' = . ; then \ echo "ERROR: distcleancheck can only run from a VPATH build" ; \ exit 1 ; \ fi @test `$(distcleancheck_listfiles) | wc -l` -eq 0 \ || { echo "ERROR: files left in build directory after distclean:" ; \ $(distcleancheck_listfiles) ; \ exit 1; } >&2 check-am: all-am check: $(BUILT_SOURCES) $(MAKE) $(AM_MAKEFLAGS) check-am all-am: Makefile $(SCRIPTS) $(MANS) $(DATA) all-local installdirs: for dir in "$(DESTDIR)$(libexecdir)" "$(DESTDIR)$(sbindir)" "$(DESTDIR)$(man8dir)" "$(DESTDIR)$(docdir)" "$(DESTDIR)$(globusperldir)" "$(DESTDIR)$(perldocdir)"; do \ test -z "$$dir" || $(MKDIR_P) "$$dir"; \ done install: $(BUILT_SOURCES) $(MAKE) $(AM_MAKEFLAGS) install-am install-exec: install-exec-am install-data: install-data-am uninstall: uninstall-am install-am: all-am @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am installcheck: installcheck-am install-strip: if test -z '$(STRIP)'; then \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ install; \ else \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \ fi mostlyclean-generic: clean-generic: distclean-generic: -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) -test . 
= "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES) maintainer-clean-generic: @echo "This command is intended for maintainers to use" @echo "it deletes files that may require special tools to rebuild." -test -z "$(BUILT_SOURCES)" || rm -f $(BUILT_SOURCES) -test -z "$(MAINTAINERCLEANFILES)" || rm -f $(MAINTAINERCLEANFILES) clean: clean-am clean-am: clean-generic clean-local mostlyclean-am distclean: distclean-am -rm -f $(am__CONFIG_DISTCLEAN_FILES) -rm -f Makefile distclean-am: clean-am distclean-generic dvi: dvi-am dvi-am: html: html-am html-am: info: info-am info-am: install-data-am: install-data-local install-docDATA \ install-globusperlDATA install-man install-perldocDATA install-dvi: install-dvi-am install-dvi-am: install-exec-am: install-libexecSCRIPTS install-sbinSCRIPTS install-html: install-html-am install-html-am: install-info: install-info-am install-info-am: install-man: install-man8 install-pdf: install-pdf-am install-pdf-am: install-ps: install-ps-am install-ps-am: installcheck-am: maintainer-clean: maintainer-clean-am -rm -f $(am__CONFIG_DISTCLEAN_FILES) -rm -rf $(top_srcdir)/autom4te.cache -rm -f Makefile maintainer-clean-am: distclean-am maintainer-clean-generic mostlyclean: mostlyclean-am mostlyclean-am: mostlyclean-generic pdf: pdf-am pdf-am: ps: ps-am ps-am: uninstall-am: uninstall-docDATA uninstall-globusperlDATA \ uninstall-libexecSCRIPTS uninstall-man uninstall-perldocDATA \ uninstall-sbinSCRIPTS uninstall-man: uninstall-man8 .MAKE: all check install install-am install-strip .PHONY: all all-am all-local am--refresh check check-am clean \ clean-generic clean-local cscopelist-am ctags-am dist dist-all \ dist-bzip2 dist-gzip dist-lzip dist-shar dist-tarZ dist-xz \ dist-zip distcheck distclean distclean-generic distcleancheck \ distdir distuninstallcheck dvi dvi-am html html-am info \ info-am install install-am install-data install-data-am \ install-data-local install-docDATA install-dvi install-dvi-am \ install-exec install-exec-am install-globusperlDATA \ install-html install-html-am install-info install-info-am \ install-libexecSCRIPTS install-man install-man8 install-pdf \ install-pdf-am install-perldocDATA install-ps install-ps-am \ install-sbinSCRIPTS install-strip installcheck installcheck-am \ installdirs maintainer-clean maintainer-clean-generic \ mostlyclean mostlyclean-generic pdf pdf-am ps ps-am tags-am \ uninstall uninstall-am uninstall-docDATA \ uninstall-globusperlDATA uninstall-libexecSCRIPTS \ uninstall-man uninstall-man8 uninstall-perldocDATA \ uninstall-sbinSCRIPTS .pm.html: pod2html --noindex $< > $@ .pm.3pm: pod2man --section=3pm $< > $@ install-data-local: $(noinst_DATA) $(mkinstalldirs) $(DESTDIR)$(MAN_DIR) for manpage in $?; do \ $(INSTALL_DATA) $$manpage $(DESTDIR)$(MAN_DIR)/Globus::GRAM::`basename $$manpage` || exit 1; \ done all-local: $(globusperl_DATA) $(MKDIR_P) Globus/GRAM if [ -f JobDescription.pm ] ; then \ $(INSTALL) JobDescription.pm Globus/GRAM ; \ else \ $(INSTALL) $(srcdir)/JobDescription.pm Globus/GRAM ; \ fi $(INSTALL) $(srcdir)/JobManager.pm Globus/GRAM $(INSTALL) $(srcdir)/StdioMerger.pm Globus/GRAM clean-local: rm -rf Globus @BUILD_MANPAGES_TRUE@.txt.8: @BUILD_MANPAGES_TRUE@ $(A2X) -d manpage -f manpage $< distuninstallcheck: @: # Tell versions [3.59,3.63) of GNU make to not export all variables. # Otherwise a system limit (for SysV at least) may be exceeded. 
.NOEXPORT:
globus_gram_job_manager_scripts-7.3/globus-gatekeeper-admin.8
'\" t
.\" Title: globus-gatekeeper-admin
.\" Author: [see the "AUTHOR" section]
.\" Generator: DocBook XSL Stylesheets vsnapshot
.\" Date: 03/31/2018
.\" Manual: Grid Community Toolkit Manual
.\" Source: Grid Community Toolkit 6
.\" Language: English
.\"
.TH "GLOBUS\-GATEKEEPER\-ADMIN" "8" "03/31/2018" "Grid Community Toolkit 6" "Grid Community Toolkit Manual"
.\" -----------------------------------------------------------------
.\" * Define some portability stuff
.\" -----------------------------------------------------------------
.\" ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
.\" http://bugs.debian.org/507673
.\" http://lists.gnu.org/archive/html/groff/2009-02/msg00013.html
.\" ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
.ie \n(.g .ds Aq \(aq
.el .ds Aq '
.\" -----------------------------------------------------------------
.\" * set default formatting
.\" -----------------------------------------------------------------
.\" disable hyphenation
.nh
.\" disable justification (adjust text to left margin only)
.ad l
.\" -----------------------------------------------------------------
.\" * MAIN CONTENT STARTS HERE *
.\" -----------------------------------------------------------------
.SH "NAME"
globus-gatekeeper-admin \- Manage globus\-gatekeeper services
.SH "SYNOPSIS"
.sp
\fBglobus\-gatekeeper\-admin\fR [\-h]
.sp
\fBglobus\-gatekeeper\-admin\fR \-l [\-n \fINAME\fR]
.sp
\fBglobus\-gatekeeper\-admin\fR \-e \fISERVICE\fR [\-n \fINAME\fR]
.sp
\fBglobus\-gatekeeper\-admin\fR \-E
.sp
\fBglobus\-gatekeeper\-admin\fR \-d \fISERVICE\fR
.SH "DESCRIPTION"
.sp
The \fBglobus\-gatekeeper\-admin\fR program manages service entries which are used by the \fBglobus\-gatekeeper\fR to execute services\&. Service entries are located in the /etc/grid\-services directory\&. The \fBglobus\-gatekeeper\-admin\fR can list, enable, or disable specific services, or set a service as the default\&. The \fI\-h\fR command\-line option shows a brief usage message\&.
.sp
The \fB\-l\fR command\-line option to \fBglobus\-gatekeeper\-admin\fR will cause it to list all of the services which are available to be run by the \fBglobus\-gatekeeper\fR\&. In the output, the service name will be followed by its status in brackets\&. Possible status strings are \fBENABLED\fR, \fBDISABLED\fR, and \fBALIAS to \fR\fB\fINAME\fR\fR, where \fINAME\fR is another service name\&.
.sp
If the \fB\-n \fR\fB\fINAME\fR\fR option is used, then only information about the service named \fINAME\fR is printed\&.
.sp
The \fB\-e \fR\fB\fISERVICE\fR\fR command\-line option to \fBglobus\-gatekeeper\-admin\fR will cause it to enable a service so that it may be run by the \fBglobus\-gatekeeper\fR\&.
.sp
If the \fB\-n \fR\fB\fINAME\fR\fR option is used as well, then the service will be enabled with the alias \fINAME\fR\&.
.sp
The \fB\-E\fR command\-line option to \fBglobus\-gatekeeper\-admin\fR will cause it to enable a service alias with the name jobmanager\&. The \fBglobus\-gatekeeper\-admin\fR program will choose the first service it finds as the default\&.
To enable a particular service as the default, use the \fB\-e\fR parameter described above with the \fB\-n\fR parameter\&.
.sp
The \fB\-d \fR\fB\fISERVICE\fR\fR command\-line option to \fBglobus\-gatekeeper\-admin\fR will cause it to disable a service so that it may not be run by the \fBglobus\-gatekeeper\fR\&. All aliases to a disabled service are also disabled\&.
.SH "FILES"
.sp
The following files affect \fBglobus\-gatekeeper\-admin\fR execution:
.PP
\fB/etc/grid\-services\fR
.RS 4
Default location of enabled gatekeeper service descriptions\&.
.RE
.SH "AUTHOR"
.sp
Copyright \(co 1999\-2016 University of Chicago
globus_gram_job_manager_scripts-7.3/globus-gatekeeper-admin.txt
GLOBUS-GATEKEEPER-ADMIN(8)
==========================
:doctype: manpage
:man source: Grid Community Toolkit
:man version: 6
:man manual: Grid Community Toolkit Manual
:man software: Grid Community Toolkit

NAME
----
globus-gatekeeper-admin - Manage globus-gatekeeper services

SYNOPSIS
--------
*globus-gatekeeper-admin* [-h]

*globus-gatekeeper-admin* -l [-n 'NAME']

*globus-gatekeeper-admin* -e 'SERVICE' [-n 'NAME']

*globus-gatekeeper-admin* -E

*globus-gatekeeper-admin* -d 'SERVICE'

DESCRIPTION
-----------
The *globus-gatekeeper-admin* program manages service entries which are used by the *globus-gatekeeper* to execute services. Service entries are located in the +/etc/grid-services+ directory. The *globus-gatekeeper-admin* can list, enable, or disable specific services, or set a service as the default. The '-h' command-line option shows a brief usage message.

The *-l* command-line option to *globus-gatekeeper-admin* will cause it to list all of the services which are available to be run by the *globus-gatekeeper*. In the output, the service name will be followed by its status in brackets. Possible status strings are *ENABLED*, *DISABLED*, and *ALIAS to 'NAME'*, where 'NAME' is another service name.

If the *-n 'NAME'* option is used, then only information about the service named 'NAME' is printed.

The *-e 'SERVICE'* command-line option to *globus-gatekeeper-admin* will cause it to enable a service so that it may be run by the *globus-gatekeeper*.

If the *-n 'NAME'* option is used as well, then the service will be enabled with the alias 'NAME'.

The *-E* command-line option to *globus-gatekeeper-admin* will cause it to enable a service alias with the name +jobmanager+. The *globus-gatekeeper-admin* program will choose the first service it finds as the default. To enable a particular service as the default, use the *-e* parameter described above with the *-n* parameter.

The *-d 'SERVICE'* command-line option to *globus-gatekeeper-admin* will cause it to disable a service so that it may not be run by the *globus-gatekeeper*. All aliases to a disabled service are also disabled.

FILES
-----
The following files affect *globus-gatekeeper-admin* execution:

*/etc/grid-services*:: Default location of enabled gatekeeper service descriptions.
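
EXAMPLES
--------
The following commands are an illustrative sketch of typical use. The service name +jobmanager-fork+ is only a placeholder; substitute the name of a service description that actually exists under +/etc/grid-services/available+.

    # List all known services and whether they are enabled
    globus-gatekeeper-admin -l

    # Enable a service and alias it to "jobmanager" so it becomes the default
    globus-gatekeeper-admin -e jobmanager-fork -n jobmanager

    # Disable the service; aliases pointing to it are disabled as well
    globus-gatekeeper-admin -d jobmanager-fork
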
AUTHOR
------
Copyright (C) 1999-2016 University of Chicago
globus_gram_job_manager_scripts-7.3/globus-gatekeeper-admin.in
#! /bin/sh
# Copyright 1999-2011 University of Chicago
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

prefix="${GLOBUS_LOCATION:-@prefix@}"
exec_prefix="@exec_prefix@"
sbindir="@sbindir@"
bindir="@bindir@"
datarootdir="@datarootdir@"
datadir="@datadir@"
libexecdir="@libexecdir@"
sysconfdir="@sysconfdir@"
sharedstatedir="@sharedstatedir@"
localstatedir="@localstatedir@"

enable=""
disable=""
name=""

# Print the target of the symbolic link given as $1
dealias()
{
    expr "$(ls -l "$1")" : ".* -> \(.*\)"
}

while getopts "e:d:En:lh" arg; do
    case $arg in
        e)
            if expr "$OPTARG" : "[^/]*/.*" > /dev/null 2>&1; then
                echo "Illegal service name \"$OPTARG\"" 1>&2
                exit 1
            fi
            if [ ! -f "$sysconfdir/grid-services/available/$OPTARG" ]; then
                echo "Service \"$OPTARG\" does not exist" 1>&2
                exit 1
            fi
            enable="$OPTARG"
            if [ -n "$mode" ]; then
                echo "Duplicate or incompatible options: $mode and -e" 1>&2
                exit 1
            fi
            mode="-e"
            ;;
        E)
            enable=""
            if [ -f "${sysconfdir}/grid-services/available/jobmanager" ]; then
                enable=jobmanager
            else
                for f in "${sysconfdir}/grid-services/available/jobmanager"*; do
                    if [ -f "$f" ]; then
                        enable="$(basename "$f")"
                        break
                    fi
                done
            fi
            if [ "$enable" = "" ]; then
                echo "No available jobmanager services to enable" 1>&2
                exit 1
            fi
            if [ "$enable" != "jobmanager" ]; then
                name=jobmanager
            fi
            if [ -n "$mode" ]; then
                echo "Duplicate or incompatible options: $mode and -E" 1>&2
                exit 1
            fi
            mode="-e"
            ;;
        d)
            if expr "$OPTARG" : "[^/]*/.*" > /dev/null 2>&1; then
                echo "Illegal service name \"$OPTARG\"" 1>&2
                exit 1
            fi
            if [ ! -L "$sysconfdir/grid-services/$OPTARG" ]; then
                echo "Service \"$OPTARG\" is not enabled" 1>&2
                exit 1
            fi
            disable="$OPTARG"
            if [ -n "$mode" ]; then
                echo "Duplicate or incompatible options: $mode and -d" 1>&2
                exit 1
            fi
            mode="-d"
            ;;
        n)
            if expr "$OPTARG" : "[^/]*/.*" > /dev/null 2>&1; then
                echo "Illegal service name \"$OPTARG\"" 1>&2
                exit 1
            fi
            name="$OPTARG"
            ;;
        l)
            if [ -n "$mode" ]; then
                echo "Duplicate or incompatible options: $mode and -l" 1>&2
                exit 1
            fi
            mode="-l"
            ;;
        h)
            echo "Usage: $(basename $0) OPTIONS"
            echo " -e SERVICE Enable SERVICE"
            echo " -d SERVICE Disable SERVICE"
            echo " -n NAME Service name"
            echo " -E Enable default jobmanager service"
            echo " -l List services"
            echo " -h Print this help and exit"
            exit 0
            ;;
        *)
            echo "$(basename $0): illegal option: $arg"
            echo "Usage: $(basename $0): [-e SERVICE] [-d SERVICE] [-l] [-h]"
            exit 1
            ;;
    esac
done

if [ -z "$mode" ]; then
    echo "Usage: $(basename $0): [-e SERVICE] [-d SERVICE] [-l] [-h]"
    exit 1
fi

rc=0

case $mode in
    -e)
        ln -sf "available/$enable" "$sysconfdir/grid-services/${enable}"
        # Record the result of each link so a failure is reflected in the exit code
        rc=$?
        if [ -n "${name}" ]; then
            ln -sf "$enable" "$sysconfdir/grid-services/${name}"
            rc=$?
        fi
        ;;
    -d)
        # Remove any aliases that point at the service being disabled
        for service in "$sysconfdir"/grid-services/*; do
            if [ -f "$service" -o -L "$service" ]; then
                linkname="$(dealias "$service")"
                if [ "$linkname" = "$(basename "${disable}")" ]; then
                    rm "$service"
                fi
            fi
        done
        rm "$sysconfdir/grid-services/${disable}"
        rc=$?
        ;;
    -l)
        if [ -z "$name" ]; then
            enabled=""
            for service in "${sysconfdir}"/grid-services/available/*; do
                if [ -f "$service" ]; then
                    service_name="$(basename "$service")"
                    enabled="${enabled:+"$enabled "}$service_name"
                    if [ -h "$sysconfdir/grid-services/$service_name" ]; then
                        echo "$service_name [ENABLED]"
                    else
                        echo "$service_name [DISABLED]"
                    fi
                fi
            done
            # Entries in grid-services whose names are not known services are aliases
            for service in "${sysconfdir}"/grid-services/*; do
                if [ -f "$service" ]; then
                    service_name="$(basename "$service")"
                    found=0
                    for i in $enabled; do
                        if [ "$i" = "$service_name" ]; then
                            found=1
                        fi
                    done
                    if [ "$found" -eq 0 ]; then
                        linkname="$(dealias "$service")"
                        echo "$service_name [ALIAS to $(basename "$linkname")]"
                    fi
                fi
            done
        else
            if [ -f "${sysconfdir}/grid-services/$name" ]; then
                linkname="$(dealias "${sysconfdir}/grid-services/$name")"
                if [ "$(basename "$linkname")" != "$name" ]; then
                    echo "$name [ALIAS to $(basename "$linkname")]"
                else
                    echo "$name [ENABLED]"
                fi
            elif [ -f "${sysconfdir}/grid-services/available/$name" ]; then
                echo "$name [DISABLED]"
            else
                echo "$name does not exist" 1>&2
                rc=1
            fi
        fi
        ;;
esac

exit $rc
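
# Illustration: with the default sysconfdir of /etc and a hypothetical
# service named "jobmanager-fork", running
#
#     globus-gatekeeper-admin -e jobmanager-fork -n jobmanager
#
# leaves symlinks roughly like this behind:
#
#     /etc/grid-services/jobmanager-fork -> available/jobmanager-fork
#     /etc/grid-services/jobmanager -> jobmanager-fork
#
# and "globus-gatekeeper-admin -d jobmanager-fork" removes both links again.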