debdelta/0000755000000000000000000000000012436652141007514 5ustar debdelta/debdelta_repo0000755000000000000000000002433612436652141012243 0ustar #!/usr/bin/python """ debdelta_repo Copyright (c) 2011 A. Mennucci License: GNU GPL v2 """ #TODO this scheleton does not handle 'security', where some old versions of the packages are in # a different DISTTOKEN import sys , os , tempfile , string ,getopt , tarfile , shutil , time, traceback, stat, pwd, grp from stat import ST_SIZE, ST_MTIME, ST_MODE, ST_INO, ST_DEV, S_IMODE, S_IRUSR, S_IWUSR, S_IXUSR from os.path import abspath from copy import copy from types import IntType, StringType, FunctionType, TupleType, ListType, DictType, BufferType from apt import VersionCompare __help__usage__ = "Usage: debdelta_repo [OPTION]... " __help__options__={ "verbose":"-v --verbose\n be verbose, print more informations", "workspace":"-W WORKSPACE\n directory were all the work is done", "debrepo":"-D DEBREPO\n directory of the repository of debs", } #-R #--release RELEASE #is the Debian Release file, #-d --debug # print debugging info (not really useful but for the program author) __help__ = { None : __help__usage__ +"""[COMMAND] [ARGS]..\n [command] may be one of --create --add --sos --deltas \n Use -h [command] for further help on commands""", 'create' : __help__usage__ +"""--create [ARGS]\n Creates the sqlite database SQL DB that is used to store packages' info.""", 'add' : __help__usage__ +"""--add name version arch filename disttoken or alternatively --add --stdin that reads from stdin lines with the above five arguments, tab separated it stores in the database the fact that name,version,arch has entered disttoken, and the package file is at filename (if nonabsolute, -D is used)""", 'sos' : __help__usage__ +"""--sos filename saves the filename somewhere""", 'deltas' : __help__usage__ +""" create all deltas""", } def help(cmd=None): if cmd and cmd[:2] == '--': cmd = cmd[2:] sys.stderr.write(__help__.get(cmd," UNKNOWN COMMAND ") + 
"\n") if cmd: sys.stderr.write("\nOptions:\n " +string.join( __help__options__.values(),"\n ")+"\n") try: from pysqlite2 import dbapi2 as dbapi except ImportError: dbapi = None if dbapi != None: # ===== sqlite machinery def convert_blob(s): return s #this is always a string # Register the adapter #sqlite.register_adapter(StringType, adapt_blob) # Register the converter dbapi.register_converter("blob", convert_blob) dbapi.register_converter("text", convert_blob) sql_scheme=""" create table package ( id integer unique primary key autoincrement, name text, version text, arch text, filename text, ownfile boolean, ctime integer ) ; create table dist ( id integer unique primary key autoincrement, disttoken text, package_id integer, generated boolean, ctime integer ) ; CREATE INDEX IF NOT EXISTS package_name ON package ( name ); CREATE INDEX IF NOT EXISTS package_name_arch ON package ( name,arch ); CREATE INDEX IF NOT EXISTS package_filename ON package ( filename ); CREATE INDEX IF NOT EXISTS dist_package_id ON dist ( package_id ); """ class theSQLdb: dbname=None sql_connection=None sql_cursor=None def __init__(self,dbname): assert type(dbname) == StringType assert os.path.exists(dbname) self.dbname=dbname self.sql_connection = dbapi.connect(dbname, detect_types=dbapi.PARSE_DECLTYPES | dbapi.PARSE_COLNAMES) self.sql_cursor = self.sql_connection.cursor() def __del__(self): self.sql_connection.close() def commit(self): self.sql_connection.commit() def add_one(self,name,version,arch,filename,disttoken,generated=0,ownfile=0,ctime=None): if ctime==None: ctime=int(time.time()) self.sql_cursor.execute('SELECT name,version,arch,id FROM package WHERE filename = ? 
',\ (filename,)) tp=self.sql_cursor.fetchone() if tp: if ( tp[0] != name or tp[1] != version or tp[2] != arch): sys.stderr.write('Filename already in package database as: %s\n' % repr(tp)) return tpid=tp[3] else: self.sql_cursor.execute('INSERT INTO package VALUES (null, ?, ?, ?, ?, ?, ?)',\ (name,version,arch,filename,ownfile,ctime)) tpid=self.sql_cursor.lastrowid z=self.sql_cursor.fetchone() if z: sys.stderr.write('Warning two entries with same filename?\n') self.sql_cursor.execute('SELECT id FROM dist WHERE package_id = ? AND disttoken = ? ', (tpid,disttoken)) td=self.sql_cursor.fetchone() if td: sys.stderr.write('Package,version,arch already in dist database for this disttoken\n') #FIXME we may have added a package and no dist? return self.sql_cursor.execute('INSERT INTO dist VALUES (null, ?, ?, ?, ?)',\ (disttoken,tpid,generated,ctime)) def package_versions(self,name,disttoken,arch=None,generated=None): "returns a list of id,name,arch,version" sql_cursor1 = self.sql_connection.cursor() sql_cursor2 = self.sql_connection.cursor() if generated==None: sql_cursor1.execute('SELECT package_id FROM dist WHERE disttoken = ? ',(disttoken,)) elif generated: sql_cursor1.execute('SELECT package_id FROM dist WHERE disttoken = ? AND generate = 1',(disttoken,)) else: sql_cursor1.execute('SELECT package_id FROM dist WHERE disttoken = ? AND generate = 0',(disttoken,)) z=[] for a in sql_cursor1: if arch: sql_cursor2.execute('SELECT id,name,arch,version FROM package WHERE id = ? AND arch = ?',\ (a[0],arch)) else: sql_cursor2.execute('SELECT id,name,arch,version FROM package WHERE id = ?',(a[0])) a=sql_cursor2.fetchall() z=z+a return z def create_deltas(self): namearchtokens=[] sql_cursor1 = self.sql_connection.cursor() sql_cursor2 = self.sql_connection.cursor() sql_cursor1.execute('SELECT package_id,disttoken FROM dist WHERE generated = 0 ') for n in sql_cursor1: #TODO use joins sql_cursor2.execute('SELECT name,arch FROM package WHERE id = ? 
',(n[0],)) for z in sql_cursor2: a=list(z)+[n[1]] #name,arch,disttoken if a not in namearchtokens: namearchtokens.append(a) for n in namearchtokens: versions=self.package_versions(n[0],n[2],n[1]) #TODO this is a very good place to delete extra, very old versions if len(versions) == 1: print 'Only one version for ',n,versions else: print ' Creating deltas for ',n def _cmp_(a,b): return VersionCompare(a[3],b[3]) versions.sort(cmp=_cmp_) new=versions.pop() for a in versions: print ' Create delta from ',a[3],' to ',new[3] #TODO mark all above as 'generated=1' when done, if successful def create(dbname): if os.path.exists(dbname): sys.stderr.write(sys.argv[0]+': will not overwrite already existing '+dbname+'\n') sys.exit(1) os.popen("sqlite3 '"+dbname+"'",'w').write(sql_scheme) def add(dbname, argv, stdin=None): H=theSQLdb(dbname) if stdin: for a in sys.stdin: if not a or a[0] == '#' : continue b=string.split(a,'\t') if len(b) == 5: H.add_one(*b) else: sys.stderr.write('It is not a tab separated list of 5 elements: %s\n'%repr(a)) else: if len(argv) == 5: H.add_one(*argv) else: sys.stderr.write('It was not given 5 arguments: %s\n'%repr(argv)) H.commit() def deltas(dbname): H=theSQLdb(dbname) H.create_deltas() def sos(dbname, workspace, argv): H=theSQLdb(dbname) if len(argv) != 1: sys.stderr.write('It was not given 1 arguments: %s\n'%repr(argv)) sys.exit(1) H.sql_cursor.execute('SELECT id,name,version,arch FROM package WHERE filename = ? 
',argv) a=H.sql_cursor.fetchone() if not a: sys.stderr.write('Filename not found: %s\n'%repr(argv)) return print 'WILL SAVE',a,'SOMEWHERE INSIDE',workspace,' AND UPDATE SQL ACCORDINGLY' #in particular, will mark it as 'owned', so it will be deleted when it will be old if __name__ == '__main__': #argv = debugging_argv or sys.argv if len(sys.argv) <= 1: help() raise SystemExit(0) DEBUG = 0 VERBOSE = 0 JUSTHELP=False WORKSPACE=None STDIN=False cmd=None try: ( opts, argv ) = getopt.getopt(sys.argv[1:], 'hvdW:' , ('help','debug','verbose','workspace=','add','stdin','sos','create','deltas') ) except getopt.GetoptError,a: sys.stderr.write(sys.argv[0] +': '+ str(a)+'\n') raise SystemExit(2) for o , v in opts : if o == '-v' or o == '--verbose' : VERBOSE += 1 elif o == '-d' or o == '--debug' : DEBUG += 1 elif o == '--help' or o == '-h': JUSTHELP = True elif o == '-W' or o == '--workspace': WORKSPACE=v elif o == '--stdin': STDIN=True elif o[:2] == '--' and o[2:] in __help__.keys(): if cmd : sys.stderr.write(' option ',o,'is unacceptable after',cmd) raise SystemExit(1) else: cmd=o[2:] else: sys.stderr.write(' option '+o+'is unknown, try --help') raise SystemExit(1) if JUSTHELP: help(cmd) raise SystemExit(0) if not WORKSPACE: sys.stderr.write('Need a workspace. Use -W . Read --help .\n') raise SystemExit(1) dbname=os.path.join(WORKSPACE,'theSQLdb') if cmd == "create": create(dbname) elif cmd == 'add': add(dbname,argv,STDIN) elif cmd == 'sos': sos(dbname,WORKSPACE,argv) elif cmd == 'deltas': deltas(dbname) else: sys.stderr.write("Sorry this command is yet unimplemented: "+cmd+'\n') sys.exit(1) debdelta/doc/0000755000000000000000000000000012436652150010261 5ustar debdelta/doc/debdelta_suite.xml0000644000000000000000000011301712436652141013763 0ustar
The debdelta suite Andrea C. G. Mennucci April 5, 2005 debdelta is an application suite designed to compute changes between Debian packages. These changes (that we will call 'deltas') are similar to the output of the "diff" program in that they may be used to store and transmit only the changes between Debian packages. This suite contains 'debdelta-upgrade', that downloads deltas and use them to create all Debian packages needed for an 'apt-get upgrade'. 2006-2011
Overview The debdelta application suite is really composed of different applications.
debdelta debdelta computes the delta, that is, a file that encodes the difference between two Debian packages. Example: $ a=/var/cache/apt/archives $ debdelta -v $a/emacs-snapshot-common_1%3a20060512-1_all.deb \ $a/emacs-snapshot-common_1%3a20060518-1_all.deb /tmp/emacs.debdelta the result is: deb delta is 12.5% of deb ; that is, 15452kB would be saved
debpatch debpatch can use the delta file and a copy of the old Debian package to recreate the new Debian package. (This process is called "applying the delta file"). If the old Debian package is not available, but is installed in the host, it can use the installed data; in this case, '/' is used in lieu of the old .deb. Example: $ debpatch -A /tmp/emacs.debdelta / /tmp/emacs.deb
debdeltas debdeltas can be used to generate deltas for many debs at once. It will generate delta files with names such as package_old-version_new-version_architecture.debdelta. If the delta exceeds ~70% of the deb, 'debdeltas' will delete it and leave a stamp of the form package_old-version_new-version_architecture.debdelta-too-big. Example usages are in the man page; see also .
debdelta-upgrade debdelta-upgrade will download necessary deltas and apply them to create debs for a successive apt-get upgrade. The deltas are available for upgrades in 'stable' , 'stable-security' , 'testing', 'unstable' and 'experimental', for i386 and amd64. Example usage: # apt-get update && debdelta-upgrade && apt-get upgrade If run by a non-root user, debs are saved in /tmp/archives : do not forget to move them in /var/cache/apt/archives debdelta-upgrade will also download .debs for which no delta is available (this is done in parallel to patching, to maximize speed). See the explanation of "debdelta-upgrade --deb-policy" in the man page for more informations and customization on which debs get downloaded. More informations are in next sections.
debforensic There is also another bunch of code (that though was never distributed.... it is available in the GIT repo). . debforensics creates and uses sqlite databases containing information regarding debian binary packages. debforensics --add will scan debian packages and add the list of files (and SHA1 hashes of them) to the database. debforensics --scan will check a file against multiple databases, to see if that file is part of any package. debforensics --forensic will scan a filesystem and list files that are part of a package, and files that are not (or are missplaced, or have strange permissions....). If debdelta-upgrade fails to apply a delta, and '-d' is passed, then a debug file is generated, and then debforensic may be used to understand what went wrong (theoretically). Beware: a full database for main/amd64 is ~350MBs, without indexes. So in practice currently I cannot keep a database in my host.
a delta The delta is an 'ar' archive (see 'man ar'). The delta contains 'info', some data members (named by numbers), a script named 'patch.sh.xxx', and optional gpg signatures. The script recreates the new deb. See do_delta_() in the python code for more details.
the info in a delta A delta's first 'ar' member is always named 'info', and is a text file containing some keywords and information regarding the delta itself. [TODO add details]
how to apply a delta TODO WRITEME. You may look into /usr/share/debdelta/debpatch.sh to understand the basics.
debdelta-upgrade service In June 2006 I set up a delta-upgrading framework, so that people may upgrade their Debian box using debdelta-upgrade (that downloads package 'deltas'). This section is an introduction to the framework that is behind 'debdelta-upgrade', and is also used by 'cupt'. In the following, I will simplify (in places, quite a lot).
The framework The framework is so organized: I keep up some servers where I use the program 'debdeltas' to create all the deltas; whereas endusers use the client 'debdelta-upgrade' to download the deltas and apply them to produce the debs needed to upgrade their boxes. In my server, I mirror some repositories, and then I invoke 'debdeltas' to make the deltas between them. I use the scripts /usr/share/debdelta/debmirror-delta-security and /usr/share/debdelta/debmirror-marshal-deltas for this. This generates any delta that may be needed for upgrades in squeeze,squeeze-security,wheezy,sid,experimental, for architectures i386 and amd64 (as of Mar 2011); the generated repository of deltas is more or less 10GB.
The goals There are two ultimate goals in designing this framework: SMALL) reduce the size of downloads (fit for people that pay-by-megabyte); FAST) speed up the upgrade. The two goals are unfortunately only marginally compatible. An example: bsdiff can produce very small deltas, but is quite slow (in particular with very large files); so currently (2009 on) I use 'xdelta3' as the backend diffing tool for 'debdeltas' in my server. Another example is in debs that contain archives ( .gz, , tar.gz etc etc): I have methods and code to peek inside them, so the delta become smaller, but the applying gets slower.
The repository structure The repository of deltas is just a HTTP archive; it is similar to the pool of packages; that is, if foobar_1_all.deb is stored in pool/main/f/foobar/ in the repository of debs, then the delta to upgrade it will be stored in pool/main/f/foobar/foobar_1_2_all.debdelta in the repository of deltas. Contrary to the repository of debs, a repository of deltas has no indexes, see . The delta repository is in http://debdeltas.debian.net/debian-deltas.
The repository creation Suppose that the unstable archive, on 1st Mar, contains foobar_1_all.deb (and it is in pool/main/f/foobar/ ) ; then on 2nd Mar, foobar_2_all.deb is uploaded; but this has a flaw (e.g. FTBFS) and so on 3rd Mar foobar_3_all.deb is uploaded. On 2nd Mar, the delta server generates pool/main/f/foobar/foobar_1_2_all.debdelta On 3rd Mar, the server generates both pool/main/f/foobar/foobar_1_3_all.debdelta pool/main/f/foobar/foobar_2_3_all.debdelta. So, if the end-user Ann upgrades the system on both 2nd and 3rd Mar, then she uses both foobar_1_2_all.debdelta (on 2nd) and foobar_2_3_all.debdelta (on 3rd Mar). If the end-user Boe has not upgraded the system on 2nd Mar, , and he upgrades on 3rd Mar, then on 3rd Mar he uses foobar_1_3_all.debdelta.
size limit Note that currently the server rejects deltas that exceed 70% of the deb size: indeed the size gain would be too small, and the time would be wasted, if you sum the time to download the delta and the time to apply it (OK, these are run as much as possible in parallel, yet ....). Also, the server does not generate deltas for packages that are smaller than 10KB.
/etc/debdelta/sources.conf Consider a package that is currently installed. It is characterized by name installed_version architecture (unfortunately there is no way to tell from which archive it came from, but this does not seem to be a problem currently) Suppose now that a newer version is available somewhere in an archive, and that the user wishes to upgrade to that version. The archive Release file contain these info: Origin , Label , Site, Archive. (Note that Archive is called Suite in the Release file). Example for the security archive: Origin=Debian Label=Debian-Security Archive=stable Site=security.debian.org The file /etc/debdelta/sources.conf , given the above info, determines the host that should contain the delta for upgrading the package. This information is called "delta_uri" in that file. The complete URL for the delta is built adding to the delta_uri a directory path that mimicks the "pool" structure used in Debian archives, and appending to it a filename of the form name_oldversion_newversion_architecture.debdelta. All this is implemented in the example script contrib/findurl.py . If the delta is not available at that URL, and name_oldversion_newversion_architecture.debdelta-too-big is available, then the delta is too big to be useful. If neither is present, then, either the delta has not yet been generated, or it will never be generated... but this is difficult to know.
indexes
indexes of debs in APT Let's start examining the situation for debs and APT. Using indexes for debs is a no-brainer decision: indeed, the client (i.e. the end user) does not know the list of available debs in the server, and, even knowing the current list, cannot foresee the future changes. So indexes provide needed informations: the packages' descriptions, versions, dependencies, etc etc; these info are used by apt and the other frontends.
no indexes of deltas in debdelta If you then think of deltas, you realize that all requirements above fall. Firstly there is no description and no dependencies for deltas. deltas have a "info" section, but that is, as to say, standalone Of course 'debdelta-upgrade' needs some information to determine if a delta exists, and to download it; but these information are already available: the name of the package P the old version O the new version N the architecture A Once these are known, the URL of the file F can be algorithmically determined as URI/POOL/P_O_N_A.debdelta where URI is determined from /etc/debdelta/sources.conf and POOL is the directory in the pool of the package P . This algorithm is also implemented (quite verbosely) in contrib/findurl.py in the sources of debdelta. This is the reason why currently there is no "index of deltas", and nonetheless 'debdelta-upgrade' works fine (and "cupt" as well). Adding an index of file would only increase downloads (time and size) and increase disk usage; with negligeable benefit, if any.
no incremental deltas Let me add another point that may be unclear. There are no incremental deltas (and IMHO never will be).
What "incremental" would be, and why it is not Please recall . What does not happen currently is what follows: on 3rd Mar , Boe decides to upgrade, and invokes 'debdelta-upgrade'; then 'debdelta-upgrade' finds foobar_1_2_all.debdelta and foobar_2_3_all.debdelta , it uses the foremost to generate foobar_2_all.deb, and in turn it uses this and the second delta to generate foobar_3_all.deb . This is not implemented, and it will not, for the following reasons. The delta size is, on average, 40% of the size of the deb (and this is getting worse, for different reasons, see ); so two deltas are 80% of the target deb, and this too much. It takes time to apply a delta; applying two deltas to produce one deb takes too much time. The server does generate the direct delta foobar_1_3_all.debdelta :-) so why making things complex when they are easy? :-) Note also that incremental deltas would need some index system to be implemented... indeed, Boe would have no way to know on 3rd Mar that the intermediate version of foobar between "1" and "3" is "2"; but since incremental deltas do not exist, then there is no need to have indexes).
Repository howto There are (at least) two ways to manage a repository, and run a server that creates the deltas.
debmirror --debmarshal The first way is what I currently use. It is implemented in the script /usr/share/debdelta/debmirror-marshal-deltas (a simpler version, much primitive but more readable , is /usr/share/debdelta/debmirror-delta-security) Currently I use the complex script that creates deltas for amd64 and i386, and for lenny squeeze sid experimental ; and the simpler one for lenny-security. Let me start outlining how the simple script generate deltas . It is a 3 steps process. Lets say that $secdebmir is the directory containg the mirror of the repository security.debian.org. --- 1st step #make copy of current stable-security lists of packages olddists=${TMPDIR:-/tmp}/oldsecdists-`date +'%F_%H-%M-%S'` mkdir $olddists cp -a $secdebmir/dists $olddists --- 2nd step call 'debmirror' to update the mirror ; note that I apply a patch to debmirror so that old debs are not deleted , but moved to a /old_deb directory --- 3rd step call 'debdeltas' to generate deltas , from the state of packages in $olddists to the current state in $secdebmir , and also wrt what is in stable. Note that, for any package that was deleted from the archive, then 'debdeltas' will go fishing for it inside /old_deb . The more complex script uses the new debmirror --debmarshal so it keeps 40 old snapshots of the deb archives, and it generates deltas of the current package version (the "new" version) to the versions in snapshots -10,-20,-30,-40.
hooks and repository of old_debs I wrote the skeleton for some commands. debdelta_repo --add name version arch filename disttoken This first one is to be called by the archive management tool (e.g. DAK) when a new package enters in a part of the archive (let's say, package="foobar" version="2" arch="all" and filename="pool/main/f/foobar/foobar_2_all.deb" just entered disttoken="testing/main/amd64"). That command will add that to a delta queue, so appropriate deltas will be generated; this command returns almost immediately. debdelta_repo --delta This does create all the deltas. debdelta_repo --sos filename This will be called by DAK when (before) it does delete a package from the archive; this command will save that old deb somewhere (indeed it may be needed to generate deltas sometimes in the future). (It will be up to some piece of debdelta_repo code to manage the repository of old debs, and delete excess copies). TODO that skeleton does not handle 'security', where some old versions of the packages are in a different DISTTOKEN
Goals, tricks, ideas and issues
exact patching When debpatch or debdelta-upgrade recreates a .deb, it will be identical to the desired one (so it may be possible to check it using the security features in APT note though that debdelta-upgrade saves the recontructed debs in /var/cache/apt/archives, and APT does not check them there, AFAICT). See though .
exact recompression Suppose a .deb has inside a huge file /usr/share/doc/foobar/document.info.gz and this starts with a RCS tag ... then each time it is released, the file will be different even though just few bytes were changed. Another examples are manpages that start with the header containing the version of the command. So , to get good compression of the difference, I had to be able to gunzip those files, diff them, and gzip back them exactly identical (but possibly for headers the re-gzipped files are identical but for headers, (indeed gzip headers contain sometimes a timestamp ); but this is not a problem since the reconstructed gzipeed file is then piped again into 'xdelta3' or 'bsdiff' to rebuild the 'data.tar', so the header is fixed at that stage ) For this reason, I studied gzip formats, and I wrote in debdelta some python code that does the trick (90% of the times...). This is implemented in the python routine delta_gzipped_files.
speed
some (old) numbers Warning: this section is referred to experiments done in 2006, and the backend for delta encoding was 'xdelta'. On a desktop with CPU Athlon64 3000 and a average hard disk, $ debdelta mozilla-browser_1.7.8-1sarge3_i386.deb \ mozilla-browser_1.7.8-1sarge6_i386.deb /tmp/m-b.debdelta processes the 10Mb of mozilla-browser in ~11sec, that is a speed of ~900kB per second. Then debpatch applies the above delta in 16sec, at a speed of ~600kB per second. Numbers drop in a old PC, or in a notebook (like mine, that has a Athlon 1600MHz and slow disks), where data are chewed at ~200kB per second. Still, since I have a ADSL line that downloads at max 80kB per second, I have a benefit downloading deltas. In a theoretical example, indeed, to download a 80MB package, it would take 1000seconds; whereas to download a delta that is 20% of 80MB it takes 200seconds, and then 80MB / (200kB/sec) = 400seconds to apply it, for a total of 600seconds. So I may get a "virtual speed" of 80MB / 600sec = 130kB/sec . Note that delta downloading and delta patching is done in parallel: if 4 packages as above have to be downloaded, then the total time for downloading of full debs would be 4000seconds, while the time for parallel-download-patch-apply-patch may be as low as 1400seconds. This is a real example of running 'debdelta-upgrade' : Looking for a delta for libc6 from 2.3.6-9 to 2.3.6-11 Looking for a delta for udev from 0.092-2 to 0.093-1 Patching done, time: 22sec, speed: 204kB/sec, result: libc6_2.3.6-11_i386.deb Patching done, time: 4sec, speed: 57kB/sec, result: udev_0.093-1_i386.deb Delta-upgrade download time 28sec speed 21.6k/sec total time: 53sec; virtual speed: 93.9k/sec. (Note that the "virtual speed" of 93.9k/sec , while less than the 130kB/sec of the theoretical example above, is still more than the 80kB that my ADSL line would allow). Of course the above is even better for people with fast disks and/or slow modems. 
Actually, an apt delta method may do a smart decision of how many deltas to download, and in which order, to optimize the result, (given the deltas size, the packages size, the downloading speed and the patching speed).
speeding up The problem is that the process of applying a delta to create a new deb is currently slow, even on very fast machines. One way to overcome is to "parallelize as much as possible". The best strategy that I can imagine is to keep both the CPU, the hard disk, and the Internet connection, always maxed up. This is why 'debdelta-upgrade' has two threads, the "downloading thread" and the "patching thread". The downloading thread downloads deltas (ordered by increasing size), and as soon as they are downloaded, it queues them to be applied in the "patching thread"; whereas as soon as all available deltas are downloaded it starts downloading some debs, and goes on for as long as the deltas are being applied in the "patching thread". Summarizing, the downloading thread keeps Internet busy while the patching thread keeps the CPU and HDD busy. Another speedup strategy is embedded inside the deltas themselves: since bsdiff is a memory hog, when the backend is bsdiff, I have to divide the data in chunks; this may lower the compression ratio, but the good point is that the HDD accesses and the calls to bsdiff can run "in parallel". With newer xdelta3, xdelta3 can read the original data from a pipe, so the data are not divided in chunks, but rather continously piped into xdelta3; so xdelta3 runs at the same time as when the data are read from HDD.
the 10kb trick currently, roughly half of the generated deltasthat is, discarding those that are more than 70% of the corresponding deb are less than 10KB. debdelta-upgrade downloads deltas in two passes, in the first pass it tries to download the first 10KB of a delta; if it gets a complete delta, it immediatly pipes it in the "patching thread queue", otherwise if it gets only a partial download, it adds it to the download queue; if it gets HTTP404, it possibly checks for the "toobig" timestamp, and it possibly warns the user. in the second pass, it downloads the rest of the deltas, and queues them for patching Why this complex method? because the first 10KBs of a delta contain the info, and those may be used to actually decide not to download the rest of the delta (if a TODO predictor decides that it is not worthwhile...).
the choice, the predictor Which deltas should be downloaded, VS which debs? Currently there is a rule-of-thumb: the server immediately deletes any delta that exceeds 70% of the original deb , and it replaces it with an empty file ending in ".debdelta-too-big". In such cases, "debdelta-upgrade" will download the deb instead. See the explanation of "debdelta-upgrade --deb-policy" in the man page for more info and customization on which debs get downloaded. Some time ago I tried to do devise a better way to understand when to download a delta w.r.t. a deb. The code is in the "Predictor" class .... but I could not reliably predict the final speed of patching, so currently it is not used.
State of the art All in all, I still cannot obtain high speeds: so people that have a fast ADSL Internet connection usually are better downloading all the debs, and ignoring "debdelta-upgrade" alltogether. Anyway, the best way to know is to try "debdelta-upgrade -v" and read the final statistics. See and for recent developments.
better deb compression is a worse delta 'xdelta3' can reconstruct data at high speed: on nowadays processors, it can process up to 2MB per second; but, when applying a delta, 'xdelta3' works on uncompressed data. So if the data is then compressed at a ratio 1/3, then the resulting speed on compressed data is 700KB/sec. Moreover, time is needed to actually compress the data. In recent years, 'dpkg' has transitioned from 'data.tar.gz' to 'data.tar.bz2' to 'data.tar.lzma'; each method is better at compressing, but is also slower than the previous one; since it is better at compressing, it also defeats the ability of 'debdelta' to produce small deltas (wrt the original deb, of course), and indeed statistics show that deltas are getting larger; since it is slower, it slows down the applying of deltas as well.
long time recovery As aforementioned, deltas can rebuild the deb identically to the byte. But the patch.sh script calls the standard tools 'tail','head','zgip','bzip2','lzma', etc etc to rebuild a delta; so if the argument calling or output of any of those tools changes, than a delta may become unusable. As long as deltas are used for the debdelta-upgrade service, this is no big deal: if such a tool changes, then we can adjust the deltas to it, and there is just some days disruption of the service this actually already happened some years ago, with libzip (and people will download debs instead of deltas .... as we used to). If anybody wants instead to use debdelta to archive debs for long time, (as the archive.debian.org service was doing), then we should make sure that , at any moment in future, deltas can be applied. A possible solution would be that deltas should contain, in the info files, the versions of all tools that are needed for applying. A second solution is that debdelta should keep a standard set of those tools inside the package.
streaming Let me summarize. When 'debdelta-upgrade' (or 'debpatch') recreates a deb, one step is reassembling the data.tar part inside it; this part moreover is compressed (gzip, bzip2 or lately lzma). This 'reassembling and compressing' takes time (both for CPU and for HD), and is moreover quite useless, since, in short time, 'apt' will call 'dpkg -i' that decompresses and reopens the data.tar in the deb. It is then reasonable to collapse this two parts, and this would possibly speed up the upgrade a bit. A first step is '--format=unzipped' , a next step may be '--format=preunpacked' .
--format=unzipped The recently introduced new --format=unzipped may speed up package upgrades. If you call 'debdelta-upgrade' with the option '--format=unzipped' , then in the recreated deb the data.tar part will not be compressed. This may speed up the 'debdelta-upgrade' + 'apt-get upgrade' process. Indeed, writing to hard disk is fast (let's say 5MB/sec, but usually much more); whereas compressing random data with 'bzip2 -9' or 'lzma -9' is much slower (let's say 2.0MB/sec and 1.5 MB/sec) ; and moreover the compressed data is then decompressed by dpkg when installing; so avoiding the compress/decompress should be a win/win (unless you run out of disk space...). Indeed I see that the creation of deltas is much faster; but I still do not have enough data collected....
--format=preunpacked Here is another idea. When 'debdelta-upgrade' is called in upgrading a package 'foobar' it currently creates 'foobar_2.deb'. By an appropriate cmdline switch '--format=preunpacked', instead of creating a 'foobar_2.deb' , it directly saves all of its files to the filesystem, and it adds an extension to all the file names, making sure that no file name conflicts (=overwrites) with a preexisting file on the filesystem; then it creates a file 'foobar_2.deb_preunpacked' , that is a deb package where 'data.tar.xxx' is replaced with 'data_list', just a text file specifying the contents of 'data.tar.xxx' and where regular files were temporarily unpacked. Note that the above idea overlaps a lot with the SummerOfCode2010 StreamingPackageInstall debdelta-upgrade --format=preunpacked is now implemented as a proof-of-concept (it does not really write temporary files to HD yet). The format of data_list is Files: TYPE MODE USER GROUP MTIME NAME_FILE_WAS_UNPACKED_TO (if regular file) ORIGINAL_FILENAME LINK_NAME (if link) [repeat] Example of data_list Files: d 0755 root root 1304626623 ./etc - 0644 root root 1304626594 /./etc/gnashrc_1956_debdelta_preunpacked ./etc/gnashrc l 0777 root root 1304626629 ./usr/share/man/man1/gtk-gnash.1.gz gnash.1.gz PROS: (1) may be faster; (2) if you need to upgrade a 100MB package, you do not need to save both the deb and (while 'dpkg --unpack') the whole new deb data : so there is less risk of running out of disk space. CONS: (1) you cannot install that "preunpacked deb" twice (so dpkg should probably remove it once it has installed it); (2) you cannot move it to another host; (3) when "apt-get clean", all temporary files have to be removed as well. So it may be a good idea to use ".deb_preunpacked" as extension for them. And I would recommend using '--format=unzipped' for essential packages such as the kernel. 
If you like the idea, someone should help in changing 'dpkg' so that it would be able to install starting from 'foobar_2.deb_preunpacked'. And change APT so that it would interact with 'debdelta' to create the 'foobar_2.deb_unpacked' files, and pass them to dpkg (and clean them properly).
Todo
todo list Prepare an APT method so that 'apt-get upgrade' would actually use deltas. Some code is already written. See also 2011 Google Summer of Code. It would be nice if debdelta-upgrade would actually choose whether to download a delta and use it to create the .deb, or to download the deb, depending on which one would be faster. Unfortunately, this decision must depend on a good model to predict the speed of patching... and this I still cannot achieve. in debdelta-upgrade, have as many "patching threads" as there are cores upgrade debdelta-upgrade to newer libapt support multiarch collect data, benchmark! (some debdelta behaviours are coded in magic numbers that I got from rule-of-thumb reasoning on small datasets) support long time exact recovery : embed a copy of gzip, libzip, bzip2 and lzma in debdelta??
things are getting worse W.r.t. when I started deploying debdelta, things got worse, for two reasons. One problem is that delta backends are bad at compressing a binary that was compiled from the same source but with two different compilers; see in particular the Google Courgette project, and compare it with the problems I encountered lately when Debian switched from GCC 4.4 to 4.5, when it happened that the binaries were so different that the compression of the new binary with LZMA would be smaller than the BSDIFF of the old and the new binary (!!). Unfortunately it seems that Google Courgette was hit with a patent infringement claim, so we should study how to reduce the size of deltas, and/or make them faster (possibly implementing lzma in xdelta3; or automatically choosing 'bsdiff' vs 'xdelta3' depending on the situation).
debdelta/doc/Makefile0000644000000000000000000000034712436652141011725 0ustar all: html/index.html debdelta_suite.pdf html: mkdir html html/index.html: html debdelta_suite.xml rm html/*.html docbook2html -o html debdelta_suite.xml debdelta_suite.pdf: debdelta_suite.xml docbook2pdf debdelta_suite.xml debdelta/doc/debdelta_suite.txt0000644000000000000000000001142512436652150014002 0ustar The debdelta suite

The debdelta suite

Andrea C. G. Mennucci

debdelta is an application suite designed to compute changes between Debian packages. These changes (that we will call 'deltas') are similar to the output of the "diff" program in that they may be used to store and transmit only the changes between Debian packages. This suite contains 'debdelta-upgrade', that downloads deltas and uses them to create all Debian packages needed for an 'apt-get upgrade'.


Table of Contents
1. Overview
1.1. debdelta
1.2. debpatch
1.3. debdeltas
1.4. debdelta-upgrade
2. Todo

1. Overview

The debdelta application suite is really composed of different applications.


1.1. debdelta

'debdelta' computes the delta, that is, a file that encodes the difference between two Debian packages. Example:


$ a=/var/cache/apt/archives 
$ debdelta -v $a/emacs-snapshot-common_1%3a20060512-1_all.deb \
  $a/emacs-snapshot-common_1%3a20060518-1_all.deb /tmp/emacs.debdelta
the result is: deb delta is 12.5% of deb ; that is, 15452kB would be saved


1.2. debpatch

'debpatch' can use the delta file and a copy of the old Debian package to recreate the new Debian package. (This process is called "applying the delta file"). If the old Debian package is not available, but is installed in the host, it can use the installed data; in this case, '/' is used in lieu of the old .deb. Example:


$ debpatch -A  /tmp/emacs.debdelta / /tmp/emacs.deb


1.3. debdeltas

'debdeltas' can be used to generate deltas for many debs at once. It will generate delta files with names such as package_old-version_new-version_architecture.debdelta and put them in the directory where the new .deb is. If the delta exceeds ~70% of the deb, 'debdeltas' will delete it and leave a stamp of the form package_old-version_new-version_architecture.debdelta-too-big Example usages are in the man page; see also the scripts /usr/share/debdelta/debmirror-delta-security and /usr/share/debdelta/debmirror-deltas


1.4. debdelta-upgrade

This command will download necessary deltas and apply them to create debs for an 'apt-get upgrade' . The deltas are available for upgrades in 'stable' , 'stable-security' , 'testing', 'unstable' and 'experimental', for i386 and amd64. Example usage:


# apt-get update && debdelta-upgrade && apt-get upgrade
If run by a non-root user, debs are saved in /tmp/archives : do not forget to move them into /var/cache/apt/archives debdelta-upgrade will also download .debs for which no delta is available (this is done in parallel to patching, to maximize speed). See the explanation of "debdelta-upgrade --deb-policy" in the man page for more info and customization on which debs get downloaded. More info is in README.upgrade


2. Todo

  1. Prepare an APT method so that 'apt-get upgrade' would actually use deltas. Some code is already written.

  2. It would be nice if debdelta-upgrade would actually choose if

    • download a delta and use it to create the .deb

    • download the deb depending on which one would be faster.

    Unfortunately, this decision must depend on a good model to predict the speed of patching... and this I still cannot achieve.

debdelta/doc/debdelta_suite.pdf0000644000000000000000000047464412436652150013754 0ustar %PDF-1.4 % 1 0 obj << /S /GoTo /D (0.1.1) >> endobj 4 0 obj (1. Overview) endobj 5 0 obj << /S /GoTo /D (0.1.1.2) >> endobj 8 0 obj (1.1. debdelta) endobj 9 0 obj << /S /GoTo /D (0.1.2.2) >> endobj 12 0 obj (1.2. debpatch) endobj 13 0 obj << /S /GoTo /D (0.1.3.2) >> endobj 16 0 obj (1.3. debdeltas) endobj 17 0 obj << /S /GoTo /D (0.1.4.2) >> endobj 20 0 obj (1.4. debdeltaupgrade) endobj 21 0 obj << /S /GoTo /D (0.1.5.2) >> endobj 24 0 obj (1.5. debforensic) endobj 25 0 obj << /S /GoTo /D (0.2.1) >> endobj 28 0 obj (2. a delta) endobj 29 0 obj << /S /GoTo /D (0.2.6.2) >> endobj 32 0 obj (2.1. the info in a delta) endobj 33 0 obj << /S /GoTo /D (0.2.7.2) >> endobj 36 0 obj (2.2. how to apply a delta) endobj 37 0 obj << /S /GoTo /D (0.3.1) >> endobj 40 0 obj (3. debdeltaupgrade service) endobj 41 0 obj << /S /GoTo /D (0.3.8.2) >> endobj 44 0 obj (3.1. The framework) endobj 45 0 obj << /S /GoTo /D (0.3.9.2) >> endobj 48 0 obj (3.2. The goals) endobj 49 0 obj << /S /GoTo /D (0.3.10.2) >> endobj 52 0 obj (3.3. The repository structure) endobj 53 0 obj << /S /GoTo /D (0.3.11.2) >> endobj 56 0 obj (3.4. The repository creation) endobj 57 0 obj << /S /GoTo /D (0.3.12.2) >> endobj 60 0 obj (3.5. size limit) endobj 61 0 obj << /S /GoTo /D (0.3.13.2) >> endobj 64 0 obj (3.6. /etc/debdelta/sources.conf) endobj 65 0 obj << /S /GoTo /D (0.3.14.2) >> endobj 68 0 obj (3.7. indexes) endobj 69 0 obj << /S /GoTo /D (0.3.14.1.3) >> endobj 72 0 obj (3.7.1. indexes of debs in APT) endobj 73 0 obj << /S /GoTo /D (0.3.14.2.3) >> endobj 76 0 obj (3.7.2. no indexes of deltas in debdelta) endobj 77 0 obj << /S /GoTo /D (0.3.15.2) >> endobj 80 0 obj (3.8. no incremental deltas) endobj 81 0 obj << /S /GoTo /D (0.3.15.3.3) >> endobj 84 0 obj (3.8.1. What "incremental" would be, and why it is not) endobj 85 0 obj << /S /GoTo /D (0.3.16.2) >> endobj 88 0 obj (3.9. 
Repository howto) endobj 89 0 obj << /S /GoTo /D (0.3.16.4.3) >> endobj 92 0 obj (3.9.1. debmirror debmarshal) endobj 93 0 obj << /S /GoTo /D (0.3.16.5.3) >> endobj 96 0 obj (3.9.2. hooks and repository of olddebs) endobj 97 0 obj << /S /GoTo /D (0.4.1) >> endobj 100 0 obj (4. Goals, tricks, ideas and issues) endobj 101 0 obj << /S /GoTo /D (0.4.17.2) >> endobj 104 0 obj (4.1. exact patching) endobj 105 0 obj << /S /GoTo /D (0.4.18.2) >> endobj 108 0 obj (4.2. exact recompression) endobj 109 0 obj << /S /GoTo /D (0.4.19.2) >> endobj 112 0 obj (4.3. speed) endobj 113 0 obj << /S /GoTo /D (0.4.19.6.3) >> endobj 116 0 obj (4.3.1. some \(old\) numbers) endobj 117 0 obj << /S /GoTo /D (0.4.19.7.3) >> endobj 120 0 obj (4.3.2. speeding up) endobj 121 0 obj << /S /GoTo /D (0.4.19.8.3) >> endobj 124 0 obj (4.3.3. the 10kb trick) endobj 125 0 obj << /S /GoTo /D (0.4.19.9.3) >> endobj 128 0 obj (4.3.4. the choice, the predictor) endobj 129 0 obj << /S /GoTo /D (0.4.19.10.3) >> endobj 132 0 obj (4.3.5. State of the art) endobj 133 0 obj << /S /GoTo /D (0.4.20.2) >> endobj 136 0 obj (4.4. better deb compression is a worse delta) endobj 137 0 obj << /S /GoTo /D (0.4.21.2) >> endobj 140 0 obj (4.5. long time recovery) endobj 141 0 obj << /S /GoTo /D (0.4.22.2) >> endobj 144 0 obj (4.6. streaming) endobj 145 0 obj << /S /GoTo /D (0.4.23.2) >> endobj 148 0 obj (4.7. format=unzipped) endobj 149 0 obj << /S /GoTo /D (0.4.24.2) >> endobj 152 0 obj (4.8. format=preunpacked) endobj 153 0 obj << /S /GoTo /D (0.5.1) >> endobj 156 0 obj (5. Todo) endobj 157 0 obj << /S /GoTo /D (0.5.25.2) >> endobj 160 0 obj (5.1. todo list) endobj 161 0 obj << /S /GoTo /D (0.5.26.2) >> endobj 164 0 obj (5.2. things are getting worse) endobj 165 0 obj << /S /GoTo /D [166 0 R /Fit ] >> endobj 168 0 obj << /Length 1056 /Filter /FlateDecode >> stream xڝV]8}_FHw>kT%oUP $&03}2${5st<˝g>qHSNyrzWKd}*I8m}p%>x󎙸H_qe 4 IR4v\ci78\_XH @BE.06h")0>%~FBo] Rnֶʌʲ1?hcDy,okn ʏ`0fp>}iii6_/. 
rbfLГbUeibQ0L(벢ݔ,̿I|y ׇ{CkM)mTqsn5*$9Sn(uiuov`<߬ᒖ\LJZBV 7Z^54VD/3V}Q^:DeI@VtSh+#3Zhn"G oeAaLxJa~vxb]gMqT-B^@nRk?n+MFdB.(fڸmϴf4+$]V瑀!&&177Wtj Wpڊ.TnOڧbG\A=`?d:_9V֭Ctյd1q.0RTPLv'u6ɯ}tC`e{3 7{Tf x"`XLnBOG{aԎV%u}6kl T?~O@z"Yl6:pfLiՅnڽ^ג_*ĽlءE4)|4+fWwMi1ಔ# sVi}Lgh9V9a\mWSf WBzZ;Spph%O =i:pӱyQqO">2 endstream endobj 166 0 obj << /Type /Page /Contents 168 0 R /Resources 167 0 R /MediaBox [0 0 609.714 789.041] /Parent 189 0 R >> endobj 169 0 obj << /D [166 0 R /XYZ -1.269 814.22 null] >> endobj 170 0 obj << /D [166 0 R /XYZ 71.731 718.306 null] >> endobj 171 0 obj << /D [166 0 R /XYZ 71.731 718.306 null] >> endobj 174 0 obj << /D [166 0 R /XYZ 76.712 511.615 null] >> endobj 2 0 obj << /D [166 0 R /XYZ 167.344 466.361 null] >> endobj 175 0 obj << /D [166 0 R /XYZ 71.731 446.221 null] >> endobj 176 0 obj << /D [166 0 R /XYZ 71.731 431.327 null] >> endobj 6 0 obj << /D [166 0 R /XYZ 206.46 394.112 null] >> endobj 177 0 obj << /D [166 0 R /XYZ 71.731 374.007 null] >> endobj 178 0 obj << /D [166 0 R /XYZ 71.731 374.007 null] >> endobj 180 0 obj << /D [166 0 R /XYZ 71.731 336.164 null] >> endobj 182 0 obj << /D [166 0 R /XYZ 71.731 266.097 null] >> endobj 10 0 obj << /D [166 0 R /XYZ 211.094 228.164 null] >> endobj 183 0 obj << /D [166 0 R /XYZ 71.731 205.269 null] >> endobj 184 0 obj << /D [166 0 R /XYZ 71.731 205.269 null] >> endobj 185 0 obj << /D [166 0 R /XYZ 71.731 148.02 null] >> endobj 186 0 obj << /D [166 0 R /XYZ 71.731 124.388 null] >> endobj 187 0 obj << /D [166 0 R /XYZ 71.731 48.817 null] >> endobj 167 0 obj << /Font << /F32 172 0 R /F35 173 0 R /F37 179 0 R /F38 181 0 R /F39 188 0 R >> /ProcSet [ /PDF /Text ] >> endobj 193 0 obj << /Length 1684 /Filter /FlateDecode >> stream xڝXKs6Whi,͘4ߏ̤;uK:%NH%@!=(`/*X}#.W_fQ>,(_qi֫o( 5~qːDۿ%E0$4/NR?ӕ~oB?AB^yKLNy.[ӹҰ,Y!"{PZOrzaGČUBX?\t %LGcڀ|¾\ީWm/ʗoAl/WuX-^eJTbUY͏C?MBmhm=6=NXoFXKզ>* asqLP٢3~Uz͌lڅ^c nGDh'ڣP7\-@؎_zgfq~ȟA֐.BVkcSa) Ƥ=hM_:{@GiSYXOa7=Νsϳ\6E=R\ ”x/hڣAE5Y?I$ŠږVlqGԡv8Ej"@#ԴŎ2uLs/qUݜ}Q\McÎX3LYCvD0[e#et0j)F&.2{]%BHCA@Rj˂ 
ajx~^>Ϩi}JO'~s8uEz}L17A}V,ն{oY7܊$u:Đz~zeO׽Jox[jo9^%8v! 1|75pMYhDm[!1ns5jP_Pqc'(jADݻe<{C6hz/WSԏ^[.n&zm(9gsךi%F=Dt%Xr]S{${ 2!8cf mV}mQlO; 4IF g%}Q"5>mU6 $b8Y/JA J\ӵCۥ,BZFfuN6#(Q|UP{xFe@pT0f٘!%/<^l#<ɋƫEuCQ35՚Uu(ÉgN{(W{Dz Pz 3(DxSi2P.Ze0b199bPV%&W ѰtS}N9ܴߍ@t*<#EG"֨\UQ*ϒ%32f5*M'PL^0ZA/l1n^4=Φv PV+\+zlNG~TT3yѭ\.я0 5^Ьyq-k;Rc qȒ;'~C8j.!l7|Qu~H 53ijn^0š Ef4I(@w £p;"Lө$Ctn|]Io T&P7 | }܊F~g_>R.FS.bjA{C>QJ%e`0A#8t'2``ht&w7DqiLJ:i{Zy' endstream endobj 192 0 obj << /Type /Page /Contents 193 0 R /Resources 191 0 R /MediaBox [0 0 609.714 789.041] /Parent 189 0 R /Annots [ 190 0 R ] >> endobj 190 0 obj << /Type /Annot /Subtype /Link /Border[0 0 0]/H/I/C[1 0 0] /Rect [220.93 579.89 267.754 590.794] /A << /S /GoTo /D (0:repo_howto) >> >> endobj 194 0 obj << /D [192 0 R /XYZ -1.269 814.22 null] >> endobj 14 0 obj << /D [192 0 R /XYZ 214.436 667.726 null] >> endobj 195 0 obj << /D [192 0 R /XYZ 71.731 647.622 null] >> endobj 196 0 obj << /D [192 0 R /XYZ 71.731 647.622 null] >> endobj 197 0 obj << /D [192 0 R /XYZ 151.093 621.898 null] >> endobj 198 0 obj << /D [192 0 R /XYZ 119.552 595.995 null] >> endobj 199 0 obj << /D [192 0 R /XYZ 71.731 579.641 null] >> endobj 18 0 obj << /D [192 0 R /XYZ 267.832 542.426 null] >> endobj 200 0 obj << /D [192 0 R /XYZ 71.731 519.309 null] >> endobj 201 0 obj << /D [192 0 R /XYZ 71.731 519.309 null] >> endobj 202 0 obj << /D [192 0 R /XYZ 119.552 496.598 null] >> endobj 203 0 obj << /D [192 0 R /XYZ 71.731 471.527 null] >> endobj 204 0 obj << /D [192 0 R /XYZ 119.552 414.506 null] >> endobj 205 0 obj << /D [192 0 R /XYZ 71.731 393.2 null] >> endobj 206 0 obj << /D [192 0 R /XYZ 71.731 333.644 null] >> endobj 207 0 obj << /D [192 0 R /XYZ 71.731 321.504 null] >> endobj 22 0 obj << /D [192 0 R /XYZ 228.495 282.232 null] >> endobj 208 0 obj << /D [192 0 R /XYZ 71.731 262.127 null] >> endobj 209 0 obj << /D [192 0 R /XYZ 71.731 262.127 null] >> endobj 210 0 obj << /D [192 0 R /XYZ 124.533 236.403 null] >> endobj 211 0 obj << /D [192 0 R /XYZ 161.046 
223.452 null] >> endobj 212 0 obj << /D [192 0 R /XYZ 209.473 210.501 null] >> endobj 213 0 obj << /D [192 0 R /XYZ 211.805 197.549 null] >> endobj 214 0 obj << /D [192 0 R /XYZ 71.731 162.516 null] >> endobj 215 0 obj << /D [192 0 R /XYZ 71.731 136.613 null] >> endobj 216 0 obj << /D [192 0 R /XYZ 71.731 121.669 null] >> endobj 191 0 obj << /Font << /F39 188 0 R /F32 172 0 R /F37 179 0 R /F35 173 0 R /F38 181 0 R /F43 217 0 R >> /ProcSet [ /PDF /Text ] >> endobj 221 0 obj << /Length 1346 /Filter /FlateDecode >> stream xڕWߏ8~߿$R UݫRU.VNb-`jCwx0aw{/ 3?N}ysvd?zxCva%lg0;d,S Ӎ X[wkboݑxBOsjEmF CI6 I\E҅Q9WTgEo~+7xvMOEYS^^l S<^Ꝗ`=\LU]ؤʦ}ÄvT+陨3yzzPh OEUsQޝ⧒֍djЋddf~gJf|Xs2ߌA)V2nyĺ>۞ďD(jK}%('G!Bv7adKMdJ EC}5x[ ]+j}Pz_K!ݺ:GC/CZv\^釧w3<`<]ZBf2,!BROH7[ֹZV,?AvpeN9OSDzItJZs&G*VUf9"0P5PNzhcHk:A6Jު3\`O;h2Sw0؛!T@O2t(&- p6US$͠saFOo=t۫{ (7CaՎR7|#ULT]t,tZ;p#qkL\ kd$q}h)sA+>ηb񆊠Xj:t$eN)fp bvG~̑Bc5RT 2Os #sE8yL-g Iڂ>PzGGUɲ,|ZZE.3p*;.V<:-.m1Qt5s9f7w@Qj8;ԻAbbeIs5Q ;njcltؕy2${/h!S52Y>(=ՖBr7IDq1oã XXy|vUwD&EfeLe[;46rzsA~7=w[pJOdncpw{7iUdl{fK1R1{gKVLBw,͗#^7mJ,*GJ;Hl#ΐ~] L`M6Z獻n3A endstream endobj 220 0 obj << /Type /Page /Contents 221 0 R /Resources 219 0 R /MediaBox [0 0 609.714 789.041] /Parent 189 0 R >> endobj 222 0 obj << /D [220 0 R /XYZ -1.269 814.22 null] >> endobj 223 0 obj << /D [220 0 R /XYZ 71.731 688.418 null] >> endobj 26 0 obj << /D [220 0 R /XYZ 145.412 643.164 null] >> endobj 224 0 obj << /D [220 0 R /XYZ 71.731 623.024 null] >> endobj 225 0 obj << /D [220 0 R /XYZ 136.428 584.385 null] >> endobj 226 0 obj << /D [220 0 R /XYZ 71.731 582.228 null] >> endobj 30 0 obj << /D [220 0 R /XYZ 265.163 545.012 null] >> endobj 227 0 obj << /D [220 0 R /XYZ 71.731 524.908 null] >> endobj 228 0 obj << /D [220 0 R /XYZ 71.731 495.782 null] >> endobj 34 0 obj << /D [220 0 R /XYZ 284.158 458.566 null] >> endobj 229 0 obj << /D [220 0 R /XYZ 71.731 435.671 null] >> endobj 230 0 obj << /D [220 0 R /XYZ 277.389 425.69 null] >> endobj 231 0 obj << /D [220 0 R /XYZ 71.731 401.431 null] >> endobj 38 
0 obj << /D [220 0 R /XYZ 299.609 356.276 null] >> endobj 232 0 obj << /D [220 0 R /XYZ 71.731 332.52 null] >> endobj 233 0 obj << /D [220 0 R /XYZ 119.552 310.448 null] >> endobj 234 0 obj << /D [220 0 R /XYZ 71.731 282.388 null] >> endobj 42 0 obj << /D [220 0 R /XYZ 249.01 245.173 null] >> endobj 235 0 obj << /D [220 0 R /XYZ 71.731 225.068 null] >> endobj 236 0 obj << /D [220 0 R /XYZ 119.552 160.49 null] >> endobj 237 0 obj << /D [220 0 R /XYZ 119.552 147.539 null] >> endobj 238 0 obj << /D [220 0 R /XYZ 71.731 118.234 null] >> endobj 219 0 obj << /Font << /F39 188 0 R /F32 172 0 R /F35 173 0 R /F38 181 0 R /F37 179 0 R >> /ProcSet [ /PDF /Text ] >> endobj 242 0 obj << /Length 1650 /Filter /FlateDecode >> stream xڝXs8~_ᗛ3 Ih{Ƞش8$uh%_Rjo]ɛg77 %(Yl4 !Jp<6m@Ky2Zp"bmo<' \2#lC{4 xþ"L OkH-|o^?ڂG'iKe^a!r ͥ,FI,:T|d~ݻO^K Ty`d36U}kuH?}qH+Wƚ4VAG<]@SY.NZ*3*CR BRVyG A|xӦmE@),:Yړ$ȗAo_NJ@]VUj6rBt̕x‘nf:Xljxi>E EX,hw,Jk}uUzZ=W|^TG罖sҗ&Sرz}[Z(53lqXD82dY{Lhb,fZ,?JD6 Ώkc1Ǒlʴ2Wt˹̝> endobj 239 0 obj << /Type /Annot /Subtype /Link /Border[0 0 0]/H/I/C[1 0 0] /Rect [451.394 448.428 505.69 459.332] /A << /S /GoTo /D (0:no_indexes) >> >> endobj 243 0 obj << /D [241 0 R /XYZ -1.269 814.22 null] >> endobj 244 0 obj << /D [241 0 R /XYZ 71.731 741.22 null] >> endobj 46 0 obj << /D [241 0 R /XYZ 214.436 708.149 null] >> endobj 245 0 obj << /D [241 0 R /XYZ 71.731 685.032 null] >> endobj 246 0 obj << /D [241 0 R /XYZ 355.405 675.273 null] >> endobj 247 0 obj << /D [241 0 R /XYZ 71.731 670.247 null] >> endobj 248 0 obj << /D [241 0 R /XYZ 137.484 651.601 null] >> endobj 249 0 obj << /D [241 0 R /XYZ 71.731 649.445 null] >> endobj 250 0 obj << /D [241 0 R /XYZ 137.484 633.669 null] >> endobj 251 0 obj << /D [241 0 R /XYZ 71.731 560.528 null] >> endobj 50 0 obj << /D [241 0 R /XYZ 313.423 523.312 null] >> endobj 252 0 obj << /D [241 0 R /XYZ 71.731 500.418 null] >> endobj 253 0 obj << /D [241 0 R /XYZ 119.552 477.484 null] >> endobj 254 0 obj << /D [241 0 R /XYZ 254.336 477.484 null] >> endobj 
255 0 obj << /D [241 0 R /XYZ 229.13 464.533 null] >> endobj 256 0 obj << /D [241 0 R /XYZ 203.675 438.63 null] >> endobj 257 0 obj << /D [241 0 R /XYZ 71.731 435.228 null] >> endobj 54 0 obj << /D [241 0 R /XYZ 307.054 398.012 null] >> endobj 258 0 obj << /D [241 0 R /XYZ 71.731 375.117 null] >> endobj 259 0 obj << /D [241 0 R /XYZ 340.949 365.136 null] >> endobj 260 0 obj << /D [241 0 R /XYZ 119.552 352.184 null] >> endobj 261 0 obj << /D [241 0 R /XYZ 306.042 352.184 null] >> endobj 262 0 obj << /D [241 0 R /XYZ 230.246 339.233 null] >> endobj 263 0 obj << /D [241 0 R /XYZ 119.552 326.281 null] >> endobj 264 0 obj << /D [241 0 R /XYZ 119.552 313.33 null] >> endobj 265 0 obj << /D [241 0 R /XYZ 119.552 300.378 null] >> endobj 266 0 obj << /D [241 0 R /XYZ 119.552 274.476 null] >> endobj 267 0 obj << /D [241 0 R /XYZ 360.845 261.524 null] >> endobj 268 0 obj << /D [241 0 R /XYZ 71.731 258.122 null] >> endobj 58 0 obj << /D [241 0 R /XYZ 208.209 220.906 null] >> endobj 269 0 obj << /D [241 0 R /XYZ 71.731 200.802 null] >> endobj 270 0 obj << /D [241 0 R /XYZ 71.731 140.045 null] >> endobj 271 0 obj << /D [241 0 R /XYZ 71.731 125.848 null] >> endobj 240 0 obj << /Font << /F39 188 0 R /F32 172 0 R /F35 173 0 R /F38 181 0 R >> /ProcSet [ /PDF /Text ] >> endobj 275 0 obj << /Length 1704 /Filter /FlateDecode >> stream xڭXKs6Wh|<#|39mi&dt< IS v_Irz/`/,~x3ې*ZvdMͲ8"4ݕ?wz`^mI+맦e^u"0@X4 'HI[IJ,z z~MeqBWQ0/hCh ekVRXxj[[n Z_#ew=怦C;Kxcm/9Z{D(DQ5@XmwYE'h!j+|LBI󭗔]!%t$V\2~O~< Ȝ 25ꍉl 1JT C*\C dnho~[Att&4ߗ_񇞅95o3);{_HㄤYCvqL 7/d0HZ^Cmޏ||\`|}d܆ n;ԍt*;-,T[-5tH2w֚di c.'9gOKŨ =VԊ ߾|8GbGsVa-:־ (ٖL@y3D\%Ȋc.}ٱU,dYcӉVl\2^ʝp+uU>|v8u'@k!Fq-ګk%*˘cD$ ZIuO~/ $h_ʺ^n/7AL26 5QFЀ$`3sT7I@rrCT" lO9}F(f '-L|1Y;JYX`R`vhLIUyIfK״9ŽP',gvw/WZ 6TJӹʽe|Z&AI߀fB"K0z5V@t# ÜV@vaTE_T ۷`W=s?xg*0*&3h(4ؖ 6lg}cn ];Mu칝zaRh#؄<}zq4}MG2gӰ=on)` dy,2KG;] %9 endstream endobj 274 0 obj << /Type /Page /Contents 275 0 R /Resources 273 0 R /MediaBox [0 0 609.714 789.041] /Parent 189 0 R >> endobj 276 0 obj << /D [274 0 R /XYZ 
-1.269 814.22 null] >> endobj 62 0 obj << /D [274 0 R /XYZ 328.157 707.841 null] >> endobj 277 0 obj << /D [274 0 R /XYZ 71.731 687.736 null] >> endobj 278 0 obj << /D [274 0 R /XYZ 389.586 674.964 null] >> endobj 279 0 obj << /D [274 0 R /XYZ 449.092 636.11 null] >> endobj 280 0 obj << /D [274 0 R /XYZ 152.757 532 null] >> endobj 281 0 obj << /D [274 0 R /XYZ 119.552 480.195 null] >> endobj 282 0 obj << /D [274 0 R /XYZ 119.552 454.292 null] >> endobj 283 0 obj << /D [274 0 R /XYZ 71.731 424.987 null] >> endobj 66 0 obj << /D [274 0 R /XYZ 200.535 387.771 null] >> endobj 284 0 obj << /D [274 0 R /XYZ 71.731 387.592 null] >> endobj 70 0 obj << /D [274 0 R /XYZ 284.029 353.3 null] >> endobj 285 0 obj << /D [274 0 R /XYZ 71.731 333.226 null] >> endobj 272 0 obj << /D [274 0 R /XYZ 71.731 267.273 null] >> endobj 74 0 obj << /D [274 0 R /XYZ 333.857 231.906 null] >> endobj 286 0 obj << /D [274 0 R /XYZ 71.731 211.831 null] >> endobj 287 0 obj << /D [274 0 R /XYZ 230.495 186.077 null] >> endobj 288 0 obj << /D [274 0 R /XYZ 71.731 161.007 null] >> endobj 273 0 obj << /Font << /F39 188 0 R /F32 172 0 R /F35 173 0 R /F38 181 0 R >> /ProcSet [ /PDF /Text ] >> endobj 293 0 obj << /Length 1948 /Filter /FlateDecode >> stream xڵYێ6}߯0 ]rH">$Bh[XItI*%!E^o/hr v#oM-2 j;-t`*F'=΃؛x]kK~}|dyA75X%&VcZ .`%Q&H"7Zj[|xpv+dlai(2ZS LM D!S?'.0, FZ mH$M0޼)<"E1髷RE!I* S;&E9'o8'5S6%@HF݇|`@oo"dbq;{/^uLׄ$+Kz!q-a{^efQ;Μ4ٴ 469:!9.K42ځ;qHzi8 N}1ğ v ֘=sxڋJZ,(fѦuNJϒ2NHj H^^\^ر+i縒=QtoA?O0iĮ*w8_k"ZKE88}$WADa@7 4Q& fid+5}!QDs-ޤBOPyC$쁔 ,:?5mMs:.z# y`7<F#X2w/{$w~X:Yj/pZ7U," RsWEΚ DU AQd PW}~8悴Iew: )F[RU*K 4`zS5HxML5) -'*BA+t*sܣ|n 6SJ KP4Y}K:C |y띃,yMIosߐD5p_MG["0n=wh;WG]IhHէ[i;(,r`hs4I03ٙB Ϩ>O|` e%k}7*[A̪ ! 
.)^I#;̹1T5 6,Dby*F0L ?K1 \ꖙV 2"sڢIpA~&54&B |n7x+}1wR5Et~- Zo4>@I4.L\als}%Z}yӕ9렽mjG(kɳO3y_kJ?^ً!ʖ(N!RhrT7@;} {V𜖤2 ta'q=RBn2,o{na~~]߹2mϫ{}Hu<⽎_!u4Nӆ~_.f`BBs{~3AEG͢Z%/B֢tkN|htuWZZEQۺCT0t#;ABLC'f,SuDnfjfCʯo.l;`42olNv([,jhtN~\Sd endstream endobj 292 0 obj << /Type /Page /Contents 293 0 R /Resources 291 0 R /MediaBox [0 0 609.714 789.041] /Parent 189 0 R /Annots [ 289 0 R 290 0 R ] >> endobj 289 0 obj << /Type /Annot /Subtype /Link /Border[0 0 0]/H/I/C[1 0 0] /Rect [171.117 489.629 217.941 500.533] /A << /S /GoTo /D (0:delta_creation) >> >> endobj 290 0 obj << /Type /Annot /Subtype /Link /Border[0 0 0]/H/I/C[1 0 0] /Rect [178.59 402.157 225.414 413.061] /A << /S /GoTo /D (0:getting_worse) >> >> endobj 294 0 obj << /D [292 0 R /XYZ -1.269 814.22 null] >> endobj 295 0 obj << /D [292 0 R /XYZ 119.552 695.392 null] >> endobj 296 0 obj << /D [292 0 R /XYZ 381.243 695.392 null] >> endobj 297 0 obj << /D [292 0 R /XYZ 71.731 630.222 null] >> endobj 78 0 obj << /D [292 0 R /XYZ 294.96 593.006 null] >> endobj 298 0 obj << /D [292 0 R /XYZ 71.731 572.902 null] >> endobj 299 0 obj << /D [292 0 R /XYZ 71.731 557.973 null] >> endobj 82 0 obj << /D [292 0 R /XYZ 418.857 525.659 null] >> endobj 300 0 obj << /D [292 0 R /XYZ 71.731 503.259 null] >> endobj 301 0 obj << /D [292 0 R /XYZ 245.996 492.782 null] >> endobj 302 0 obj << /D [292 0 R /XYZ 406.145 479.831 null] >> endobj 303 0 obj << /D [292 0 R /XYZ 136.428 466.879 null] >> endobj 304 0 obj << /D [292 0 R /XYZ 394.359 466.879 null] >> endobj 305 0 obj << /D [292 0 R /XYZ 249.881 453.928 null] >> endobj 306 0 obj << /D [292 0 R /XYZ 244.372 440.976 null] >> endobj 307 0 obj << /D [292 0 R /XYZ 71.731 436.428 null] >> endobj 308 0 obj << /D [292 0 R /XYZ 129.514 418.262 null] >> endobj 309 0 obj << /D [292 0 R /XYZ 71.731 403.153 null] >> endobj 310 0 obj << /D [292 0 R /XYZ 129.514 387.377 null] >> endobj 311 0 obj << /D [292 0 R /XYZ 71.731 385.221 null] >> endobj 312 0 obj << /D [292 0 R /XYZ 129.514 369.445 null] >> endobj 313 
0 obj << /D [292 0 R /XYZ 292.86 369.445 null] >> endobj 314 0 obj << /D [292 0 R /XYZ 71.731 354.336 null] >> endobj 315 0 obj << /D [292 0 R /XYZ 129.514 338.56 null] >> endobj 218 0 obj << /D [292 0 R /XYZ 71.731 283.517 null] >> endobj 86 0 obj << /D [292 0 R /XYZ 268.55 244.144 null] >> endobj 316 0 obj << /D [292 0 R /XYZ 71.731 221.25 null] >> endobj 317 0 obj << /D [292 0 R /XYZ 71.731 209.111 null] >> endobj 90 0 obj << /D [292 0 R /XYZ 287.221 176.797 null] >> endobj 318 0 obj << /D [292 0 R /XYZ 71.731 156.722 null] >> endobj 319 0 obj << /D [292 0 R /XYZ 119.552 130.969 null] >> endobj 320 0 obj << /D [292 0 R /XYZ 169.902 118.017 null] >> endobj 291 0 obj << /Font << /F39 188 0 R /F35 173 0 R /F38 181 0 R /F32 172 0 R >> /ProcSet [ /PDF /Text ] >> endobj 325 0 obj << /Length 1663 /Filter /FlateDecode >> stream xڥXr6}Whx,MMw7&4zK3DBk` */łW& .g^({qX؋ξ۞]EL }"\ 6]|XmlmJ.eyCI?n:>,)US/P@N;T dQ-lbvN Ti6"9D](:f Ds\V;0\mx<€hyQ⩘*Ju,Z#jiÁG`>B$v|LY,uJC-l4j2X3fGk-%9 ֆc&#]]o־uSFLI/KxQX')5k21rXps4}-ܟ)]0骐 З;N*RV €0z)J#>EKZԌEnb8ڵpNد)U͎9:,K<90fͩ.\i^p)+ s7E)r,I $f:s^Ÿ|p V c̲.!v4{?3y.u4~A~"UՏLwfa[dx6O2X/>ީѵ*cI7όjM| 1 ##8{=GLƬ>`0UPk{3}¥0:Jʳw7$:i0Rt6Gidp_M..M?3md(6S%xh)^y`O}^v;֑>c]O۳T endstream endobj 324 0 obj << /Type /Page /Contents 325 0 R /Resources 323 0 R /MediaBox [0 0 609.714 789.041] /Parent 350 0 R >> endobj 326 0 obj << /D [324 0 R /XYZ -1.269 814.22 null] >> endobj 327 0 obj << /D [324 0 R /XYZ 513.839 695.392 null] >> endobj 328 0 obj << /D [324 0 R /XYZ 71.731 690.844 null] >> endobj 329 0 obj << /D [324 0 R /XYZ 137.484 673.973 null] >> endobj 330 0 obj << /D [324 0 R /XYZ 71.731 625.967 null] >> endobj 331 0 obj << /D [324 0 R /XYZ 137.484 609.415 null] >> endobj 332 0 obj << /D [324 0 R /XYZ 71.731 594.306 null] >> endobj 333 0 obj << /D [324 0 R /XYZ 137.484 578.531 null] >> endobj 334 0 obj << /D [324 0 R /XYZ 274.928 534.695 null] >> endobj 335 0 obj << /D [324 0 R /XYZ 71.731 505.39 null] >> endobj 94 0 obj << /D [324 0 R /XYZ 344.892 472.08 null] >> 
endobj 336 0 obj << /D [324 0 R /XYZ 71.731 449.68 null] >> endobj 337 0 obj << /D [324 0 R /XYZ 293.587 439.203 null] >> endobj 338 0 obj << /D [324 0 R /XYZ 71.731 426.75 null] >> endobj 339 0 obj << /D [324 0 R /XYZ 182.744 413.898 null] >> endobj 340 0 obj << /D [324 0 R /XYZ 205.249 339.178 null] >> endobj 341 0 obj << /D [324 0 R /XYZ 71.731 317.096 null] >> endobj 342 0 obj << /D [324 0 R /XYZ 182.744 306.301 null] >> endobj 343 0 obj << /D [324 0 R /XYZ 244.611 283.387 null] >> endobj 344 0 obj << /D [324 0 R /XYZ 71.731 263.363 null] >> endobj 345 0 obj << /D [324 0 R /XYZ 182.744 250.511 null] >> endobj 346 0 obj << /D [324 0 R /XYZ 229.957 201.694 null] >> endobj 347 0 obj << /D [324 0 R /XYZ 71.731 166.66 null] >> endobj 348 0 obj << /D [324 0 R /XYZ 71.731 166.66 null] >> endobj 349 0 obj << /D [324 0 R /XYZ 71.731 119.706 null] >> endobj 323 0 obj << /Font << /F39 188 0 R /F35 173 0 R /F38 181 0 R /F32 172 0 R /F37 179 0 R >> /ProcSet [ /PDF /Text ] >> endobj 353 0 obj << /Length 1906 /Filter /FlateDecode >> stream xڕX[6~ϯ0-2,])!n/ vC[ű#HřΎ@EjukYYe @i~ejJUo00 iތo  mάDkn88ɿ v WڴlJB& F2Ojt 0;%O\)e(LxΜ\H 0/' 'f2oӐM]Թo>V3!@|] Qȑa2Ev0D1I,3(UEOs_s 1_UxT#d˩3,V`pb / ^ԲS谊҆&xHȾ"z`J1yqƮ{R5fVOƢ2)knr`S௟b\;2!n5 QCK!~2"$"Q ;)(JҤ 6z/_9$Sjnu:N4T6 q[ ?#Cr:n<kEKM~i.#ӣGy3K͛4Oxf5?PIm8K6qgorHB: 6H;+-ǎ!;*SIqId3{ymF3 M }Qk=24O:\COo 5X qpsж'UuƓtZ/YPǶ:G2^{ctn~pQ;\XWL-0Nԭ=oΘA(2oqX* ''=bY+>~V^FlJw?U]5+e9-oߕ~Ћ? 
ajzk t p@0~;#^AM#4_z [07"{OI VE`uh74,ܾhSKM~2ܚjm߰֬4f.dUQN inAE?v\Yri3hJGqC7W+ZF"㵹2=ysp`DҎ5PSwbqK_VV^Fx9%qNZ4lFz{F&VKZc+ ]3sl,׮`D0 VgX/jFM;5aFg3Ò@u}ӌ{uڏv/>?5?AĩIiZP;CW]OUBӠ =&!k_Vۗ\+Fת6̿ {qp./暓{/mo endstream endobj 352 0 obj << /Type /Page /Contents 353 0 R /Resources 351 0 R /MediaBox [0 0 609.714 789.041] /Parent 350 0 R /Annots [ 322 0 R ] >> endobj 322 0 obj << /Type /Annot /Subtype /Link /Border[0 0 0]/H/I/C[1 0 0] /Rect [148.722 604.443 195.546 615.347] /A << /S /GoTo /D (0:long_time) >> >> endobj 354 0 obj << /D [352 0 R /XYZ -1.269 814.22 null] >> endobj 98 0 obj << /D [352 0 R /XYZ 341.235 705.748 null] >> endobj 355 0 obj << /D [352 0 R /XYZ 71.731 702.856 null] >> endobj 102 0 obj << /D [352 0 R /XYZ 247.561 666.375 null] >> endobj 356 0 obj << /D [352 0 R /XYZ 71.731 643.258 null] >> endobj 357 0 obj << /D [352 0 R /XYZ 145.833 633.499 null] >> endobj 358 0 obj << /D [352 0 R /XYZ 198.415 633.499 null] >> endobj 359 0 obj << /D [352 0 R /XYZ 249.334 620.547 null] >> endobj 360 0 obj << /D [352 0 R /XYZ 491.543 620.547 null] >> endobj 361 0 obj << /D [352 0 R /XYZ 71.731 604.194 null] >> endobj 106 0 obj << /D [352 0 R /XYZ 290.771 566.978 null] >> endobj 362 0 obj << /D [352 0 R /XYZ 71.731 544.083 null] >> endobj 363 0 obj << /D [352 0 R /XYZ 141.967 482.296 null] >> endobj 364 0 obj << /D [352 0 R /XYZ 310.912 482.296 null] >> endobj 365 0 obj << /D [352 0 R /XYZ 391.17 469.344 null] >> endobj 366 0 obj << /D [352 0 R /XYZ 71.731 465.942 null] >> endobj 110 0 obj << /D [352 0 R /XYZ 188.929 428.727 null] >> endobj 367 0 obj << /D [352 0 R /XYZ 71.731 425.757 null] >> endobj 114 0 obj << /D [352 0 R /XYZ 266.742 394.256 null] >> endobj 368 0 obj << /D [352 0 R /XYZ 71.731 371.856 null] >> endobj 369 0 obj << /D [352 0 R /XYZ 71.731 336.308 null] >> endobj 370 0 obj << /D [352 0 R /XYZ 71.731 141.937 null] >> endobj 371 0 obj << /D [352 0 R /XYZ 71.731 119.023 null] >> endobj 351 0 obj << /Font << /F39 188 0 R /F32 172 0 R /F35 173 0 R /F37 179 0 R /F38 
181 0 R >> /ProcSet [ /PDF /Text ] >> endobj 376 0 obj << /Length 1989 /Filter /FlateDecode >> stream xڕY[6~ϯ01`+ZvH:mwMChDU2qhbi!XC6I݆mB޶Ͼ [IFq7xKNMУ YW$LT_X8g>\O |&$^!M?w>tFP2RRzZ=^ Vj%ȯ-A|̅[ڱ M yJ&vZ'G.'|icRЛ6$; E==}vz*2W?IYU<@pO}~eolL A[y]Gۙr[5࿰Po'i%ڟg[\*VB.uP^V# Jѥ]uD*s@kHkVfL0^2p䶘!,b2nCWE6e6ƦQ o vp蹲*WydD6${&g9-?͊ SD'2'iN RĀȡ,t*1o(#Wnͧiռ2iʋ1\U&+k)A4ӟj:3miݕ"nAR5^\:ﶊSl307ABlH HKSX`U%# AZɑke@b*8G V>K)m:`d.H UѿԀ5Xf .(j` :Ud+4߭&D )wh92Kg[N/}gOH=h"KATQJQ2|;ĩ'(`tiOGiռtF-%CPkddJd +ɱtOGͽֶRPcnCШtwv/#؊#]V >ӎ7-bDBLJ/.Wzt$掚)Y1R{[pon!ՑcԂNdkZ]E|ZgMiƐ9 ڂ ǧ;^(/Cn=b =#6dN+9uVxyLX$Top# XB!I3GpVw2S!$[Xe^,1H~$ 4=h|&Z Mo{yˀmЩլ fb @Րr$p:t@(2r1ǹ@1vAxV7SU0QKÑG+G=ԲLF#[qs+Hyq?ӚJ4g*o`x[Py]&/2Y&T {U:$HO[Ρ7Mt3 nR'Yh(3`FLIbC$gq؄!2VkՁmXqc /~>jx`r^˨&M-,V ۏZIMA}gډ![ࡂÑMZ!Ʋ#'E'c:dbPٯbΗM(RW0a\@扁َ̻{@$-c;~w[R?fχ؏-,xAJ8Ve-ubmm^('u4C DZDxgPK,;njscʍ#}\9CJu9JywF+ıG5'W7&r#Wc_938/7E캎KCsɗ{xNes`,dfGYsEϘEܤTvp<8>%Dn=\M3oNb؋Cx?[Û3 endstream endobj 375 0 obj << /Type /Page /Contents 376 0 R /Resources 374 0 R /MediaBox [0 0 609.714 789.041] /Parent 350 0 R /Annots [ 373 0 R ] >> endobj 373 0 obj << /Type /Annot /Subtype /Link /Border[0 0 0]/H/I/C[1 0 0] /Rect [171.307 120.458 225.603 130.989] /A << /S /GoTo /D (0:predictor) >> >> endobj 377 0 obj << /D [375 0 R /XYZ -1.269 814.22 null] >> endobj 378 0 obj << /D [375 0 R /XYZ 71.731 583.597 null] >> endobj 118 0 obj << /D [375 0 R /XYZ 223.202 550.286 null] >> endobj 379 0 obj << /D [375 0 R /XYZ 71.731 527.701 null] >> endobj 380 0 obj << /D [375 0 R /XYZ 71.731 391.716 null] >> endobj 381 0 obj << /D [375 0 R /XYZ 71.731 325.714 null] >> endobj 122 0 obj << /D [375 0 R /XYZ 229.621 292.403 null] >> endobj 382 0 obj << /D [375 0 R /XYZ 71.731 272.329 null] >> endobj 383 0 obj << /D [375 0 R /XYZ 301.229 259.527 null] >> endobj 384 0 obj << /D [375 0 R /XYZ 387.225 259.527 null] >> endobj 385 0 obj << /D [375 0 R /XYZ 180.054 246.575 null] >> endobj 386 0 obj << /D [375 0 R /XYZ 71.731 242.027 null] >> endobj 387 0 obj << /D [375 0 R /XYZ 137.484 223.861 null] 
>> endobj 388 0 obj << /D [375 0 R /XYZ 71.731 184.907 null] >> endobj 389 0 obj << /D [375 0 R /XYZ 137.484 167.073 null] >> endobj 390 0 obj << /D [375 0 R /XYZ 71.731 120.209 null] >> endobj 374 0 obj << /Font << /F39 188 0 R /F38 181 0 R /F35 173 0 R /F32 172 0 R /F37 179 0 R >> /ProcSet [ /PDF /Text ] >> endobj 395 0 obj << /Length 1856 /Filter /FlateDecode >> stream xڕX[o6~ϯ0 [dɲӇ V`@-6WZDw(P4h";Ύp͛_׻.m0K6IYmϞӉ.Vq /(W St7YTo=V$ Y:[I[#. Al;Wh(7UM +Qs瀶Yfsb e$^8_J.HAfSS״Tj~A;uJVԜnU_Q:=ڂ(Ev>DQ<젊R,DD vK`0PDJfģØ\ӊ5 S'\)09̴iE˂G+n 0VJ՞o}YQɉjz5ձhޢjc=p3RQR)NɸRD{x^`~U 6N3z"G;u7߄K ę=U*?Ho3:R5U hlc<3>헚Qp/LRlT)j hL WS|9rt dT,&"U0X_$N>&s7ŠG! 19D.bIy^;*<D]jeEQ J֌2iSX|3By1}& ڧ:蠃1xxﰤ@\t*"Ď!An/gmJϏ"hA2XT(KwTNC?rgr K hjmeXsLjYNS4MdҘ~Uv`ZMG$eeNB}[Sk tMs=:+1d%A),06MM3+q(uř?Ӡ*)@*Ӻ.\߹RÈ@KU7PAu߽TdL\*U" RԘ݈M`VWgVk{uIuփǐ׮z}.Aԟe8wAuZ5 bփd`p+5 ^Wu0U˂M IW 3ެ= yߙ&6E]>ouOBg;2ݎf:ϔ+&s)[IqhpeB>蚔iA87#1x~|/?ǯgrg dgrˈ=K,yo pKѸV_8(>||zh zD #O3!3mnuAMuSZv[%ш$zLzxG9 +~9ma~= Pw5^ <5K7 )]EnE{E`auZA#SߡZfmi/z_ZPc[] 37v^u^7/M-i|-<#p̪؎u+mz|0@F.7>uI!ˡI'U0˼E@3:H_\xCnajF#9dS$ٸ:܇>%׏Y,Nyw#vWF1;}w:sδo^\H_#ը:$`%d`d0g)LMe¼&\.,ىf`Uh3iBz4[ g'/lGf|} Myн[O/$RB`02 lgMZޖ_nd endstream endobj 394 0 obj << /Type /Page /Contents 395 0 R /Resources 393 0 R /MediaBox [0 0 609.714 789.041] /Parent 350 0 R /Annots [ 391 0 R 392 0 R ] >> endobj 391 0 obj << /Type /Annot /Subtype /Link /Border[0 0 0]/H/I/C[1 0 0] /Rect [404.241 466.237 451.065 477.141] /A << /S /GoTo /D (0:format_unzipped) >> >> endobj 392 0 obj << /Type /Annot /Subtype /Link /Border[0 0 0]/H/I/C[1 0 0] /Rect [468.44 466.237 515.264 477.141] /A << /S /GoTo /D (0:format_preunpacked) >> >> endobj 396 0 obj << /D [394 0 R /XYZ -1.269 814.22 null] >> endobj 126 0 obj << /D [394 0 R /XYZ 291.632 708.344 null] >> endobj 397 0 obj << /D [394 0 R /XYZ 71.731 685.944 null] >> endobj 398 0 obj << /D [394 0 R /XYZ 71.731 601.579 null] >> endobj 399 0 obj << /D [394 0 R /XYZ 71.731 563.537 null] >> endobj 130 0 obj << /D [394 0 R /XYZ 236.722 528.169 null] >> endobj 400 0 
obj << /D [394 0 R /XYZ 71.731 508.095 null] >> endobj 401 0 obj << /D [394 0 R /XYZ 71.731 443.074 null] >> endobj 134 0 obj << /D [394 0 R /XYZ 420.43 405.858 null] >> endobj 402 0 obj << /D [394 0 R /XYZ 71.731 382.963 null] >> endobj 403 0 obj << /D [394 0 R /XYZ 340.332 360.03 null] >> endobj 404 0 obj << /D [394 0 R /XYZ 333.437 347.078 null] >> endobj 405 0 obj << /D [394 0 R /XYZ 71.731 312.045 null] >> endobj 372 0 obj << /D [394 0 R /XYZ 71.731 248.1 null] >> endobj 138 0 obj << /D [394 0 R /XYZ 274.89 208.827 null] >> endobj 406 0 obj << /D [394 0 R /XYZ 71.731 185.71 null] >> endobj 407 0 obj << /D [394 0 R /XYZ 338.433 124.144 null] >> endobj 393 0 obj << /Font << /F39 188 0 R /F32 172 0 R /F35 173 0 R >> /ProcSet [ /PDF /Text ] >> endobj 414 0 obj << /Length 1976 /Filter /FlateDecode >> stream xڕXێ8}W6͖dɲ7ܒ^^"%$j$*[4YH)^LmK,9EF"Z˛ŁvnrZ|oϒKmraMEKVIlߗDF64#<[ld^[pi(ʛẎկ_yKFKE wg]Ǵ+.|dOʶ{՘ H&NSc2FK3?M٭2!U-'Cp1_MBjQFb&M+OȞq#Fݶg%uE38|8B47'O H]VUQ?0Vҡe*77(f Oh8kWIvhI?2yؠ{xɂxgFTXЫ"N6%Y!4"8Qd}h UE(ro[mQ[״_Iֿ-Q"6C{hɖSTͣXnI{ $D.HL'=)IJMWI%%vNNs[L2w/֢cBAu1ptaDp i LW_ja2A=^Ć+J xb/ G!/~˿Glgc*HsÛ<҄v $f×vP 5ftF+a; IFP4T'y*Z($݀ -Cm[\Ԩlͩe<$9(o6ozeHqp[S2ެM]=m+64@q٧WB!fifawOy3 a@Fv  F_.)H`lθpؒbV2蜇ހϛFTI∤#rdCu5h'V:x1Nr70g >艹4|8wsiDb9kǥ,J޿dyötrU1,[ᨉe *NCqqb̨!O4T8VsePe )%bRZ<7  LOݸ6xqݏpqIHl`|v} ǕkӀb6^:]f/|:}mR=4]gbv؊BFÜn !*@N~;9{;肴SilYbNݬ\M+ c@:1 ~NFNHNy KF>AFMZ8$Ci1ZYq5?&>bi<wxDqtQ*x. 
][`M0TdhiBqru- Qd'naanp{MBK`ד ^!ȅp>hmŬr^tjFhz{pAgѷcW;BwLG{Wfy:yhf9ޏw bҟhpX5rf@GϟmB[Bsdq>Vqpa+I2cqs QCE;n@jڊv[n\?1qEG&LՁ[%Y4P׬;Q$#+jL\%U\l ʃkp)3+3)̬=Pq$ɷF&u[1u_RF8m?`h+XH 3o+&{/f/jꆬ!"؞8m3l,ݏR=D~2Mcı䇗7L endstream endobj 413 0 obj << /Type /Page /Contents 414 0 R /Resources 412 0 R /MediaBox [0 0 609.714 789.041] /Parent 350 0 R /Annots [ 410 0 R 411 0 R ] >> endobj 410 0 obj << /Type /Annot /Subtype /Link /Border[0 0 0]/H/I/C[1 0 0] /Rect [231.261 508.159 278.085 519.063] /A << /S /GoTo /D (0:format_unzipped) >> >> endobj 411 0 obj << /Type /Annot /Subtype /Link /Border[0 0 0]/H/I/C[1 0 0] /Rect [459.902 508.159 506.726 519.063] /A << /S /GoTo /D (0:format_preunpacked) >> >> endobj 415 0 obj << /D [413 0 R /XYZ -1.269 814.22 null] >> endobj 416 0 obj << /D [413 0 R /XYZ 71.731 718.306 null] >> endobj 417 0 obj << /D [413 0 R /XYZ 71.731 666.087 null] >> endobj 142 0 obj << /D [413 0 R /XYZ 216.028 628.872 null] >> endobj 418 0 obj << /D [413 0 R /XYZ 71.731 605.755 null] >> endobj 419 0 obj << /D [413 0 R /XYZ 71.731 535.059 null] >> endobj 420 0 obj << /D [413 0 R /XYZ 147.227 511.313 null] >> endobj 421 0 obj << /D [413 0 R /XYZ 361.054 511.313 null] >> endobj 408 0 obj << /D [413 0 R /XYZ 71.731 507.91 null] >> endobj 146 0 obj << /D [413 0 R /XYZ 272.71 470.695 null] >> endobj 422 0 obj << /D [413 0 R /XYZ 71.731 447.8 null] >> endobj 423 0 obj << /D [413 0 R /XYZ 236.054 437.818 null] >> endobj 409 0 obj << /D [413 0 R /XYZ 71.731 343.756 null] >> endobj 150 0 obj << /D [413 0 R /XYZ 298.748 306.54 null] >> endobj 424 0 obj << /D [413 0 R /XYZ 71.731 283.646 null] >> endobj 425 0 obj << /D [413 0 R /XYZ 351.121 260.712 null] >> endobj 426 0 obj << /D [413 0 R /XYZ 71.731 173.873 null] >> endobj 427 0 obj << /D [413 0 R /XYZ 294.654 163.078 null] >> endobj 428 0 obj << /D [413 0 R /XYZ 71.731 128.045 null] >> endobj 429 0 obj << /D [413 0 R /XYZ 71.731 128.045 null] >> endobj 430 0 obj << /D [413 0 R /XYZ 310.464 104.299 null] >> endobj 412 0 obj << /Font << /F39 
188 0 R /F35 173 0 R /F32 172 0 R /F38 181 0 R /F37 179 0 R >> /ProcSet [ /PDF /Text ] >> endobj 433 0 obj << /Length 1131 /Filter /FlateDecode >> stream xڕVߏ8~߿%~'iui]MVYU^9ZKx ;|3iv{a5qXOҞks6Cdn{$%JJ<|e[h Lw\p枦ۮ vx'4~t2|o|XOu4'Oz~Z?u b/fs>|]=_֟$ GUܜxX=nn7]+ ]l7҄4۬1cT~C `P?PP zҤ(掬︇B9<Ҳ8ñ aptt]_rೈ"fZzA5!g"9 ^p8TЅT>ܗùW1bx!"~,*_ڭa!^W@5'wͬm^MrZ%f[5H^i%s UI#BٷL\ Q{8!X0p9{jy@?2;1nز"^pm>8QC1 K$.bf\*5;TY<ɹQv .h2)XRtU$"덯%S-xP'Bw_a?@!>!ETQ33ETmx#yNzQ1R0=c pFEU/K[Jc$xt]vؖ"rs|+v6mg/B>OJMLi1&~R0)Fdt X5g9e*,>.~`T#3NS4c{cf}y](xD)8HBz PݑԙOd󈄠F8QbR$Q PfJ jJ haUXug"?<(&eFؑ>3(wM 4!e7{fӗ4=P SX\M4LyHRoW+5"鄣CUt%+J R6JGc^C:LC2nJ_ZxHybtSįޫ(GE,Ym+B]ZC6\LsW!AFn. qi@ݱX: endstream endobj 432 0 obj << /Type /Page /Contents 433 0 R /Resources 431 0 R /MediaBox [0 0 609.714 789.041] /Parent 350 0 R >> endobj 434 0 obj << /D [432 0 R /XYZ -1.269 814.22 null] >> endobj 435 0 obj << /D [432 0 R /XYZ 71.731 718.306 null] >> endobj 436 0 obj << /D [432 0 R /XYZ 71.731 603.372 null] >> endobj 437 0 obj << /D [432 0 R /XYZ 71.731 411.158 null] >> endobj 438 0 obj << /D [432 0 R /XYZ 71.731 350.222 null] >> endobj 439 0 obj << /D [432 0 R /XYZ 71.731 293.5 null] >> endobj 440 0 obj << /D [432 0 R /XYZ 71.731 293.5 null] >> endobj 441 0 obj << /D [432 0 R /XYZ 71.731 245.734 null] >> endobj 442 0 obj << /D [432 0 R /XYZ 71.731 195.552 null] >> endobj 431 0 obj << /Font << /F39 188 0 R /F38 181 0 R /F35 173 0 R >> /ProcSet [ /PDF /Text ] >> endobj 448 0 obj << /Length 1682 /Filter /FlateDecode >> stream xڭXM6prYXdEuQID[l$R8_ߡȑ%YkГ)J$g޼y?9Lɇۛh3ِ2\Nb p5YE!YdN>OyaOSKY}R5lI"a/n^HYNpIVnXZO?4[ؕ -acNJl@URU(JM;fGd'_>Us  3:vtKK{CESvk|s&y>U-"Qn:q:9lvLF ; !w`˽Hyx"Ep7\.>Ds0૩sAsf <>?=AINۜQ F%Q/a<=\R}d^:whR 8~tbmu&4tIYDq$)v1{3Mz1Oa\; $e W-NJZi7q$fx|^2AY'z*KITRd>!dߓnyBLɚqf6 ŌtFFx82: }w;ؙW@1Y-/#”H[HULTYƠ#ֱ,_<2=Re B ziFs,IwHmd_WmsᲮ)[Ouc6d{Wx Ag 5*\粥GtwV,fzn{+3/ˉ,My/Z0$yF811-`.nS:ȤcK h`6F fB2 )z{&[p5g1cbPh9(l@kJA[+ھRPx%6N} m M0G3ȟe.%{Bc4gWGTX@ck9~yv=5 S5۔|fZuJP$s^DڽڗR#aJRJ- 2e1%]sLu-\) hcþ kj3*g{؎%EI߻xDO0Hڬ5N"yU9 n;-;$|y&h 
þP[pc,B3B‘=t¤j~阀ToVm!BA㻇1G&|/^Ger/ ř4*cxUm`WPL]xl8ȹ`8s罚s͊' mJו 1SzyEk"GrjݲD;bSSl&;QmJdxWSGOUe_;R{VJƽ@]墄#HtI!reϧ=.F& D=?=9ܺmo3 A.&í -$!f17sn݀%fQC:Tmeo#P{w%R2p&z-#n ޒ_7d81 endstream endobj 447 0 obj << /Type /Page /Contents 448 0 R /Resources 446 0 R /MediaBox [0 0 609.714 789.041] /Parent 480 0 R /Annots [ 443 0 R 444 0 R 445 0 R ] >> endobj 443 0 obj << /Type /Annot /Subtype /Link /Border[0 0 0]/H/I/C[1 0 0] /Rect [160.289 609.037 214.585 619.941] /A << /S /GoTo /D (0:predictor) >> >> endobj 444 0 obj << /Type /Annot /Subtype /Link /Border[0 0 0]/H/I/C[1 0 0] /Rect [269.678 439.672 316.502 450.576] /A << /S /GoTo /D (0:long_time) >> >> endobj 445 0 obj << /Type /Annot /Subtype /Link /Border[0 0 0]/H/I/C[1 0 0] /Rect [198.196 325.819 245.02 336.723] /A << /S /GoTo /D (0:the_enemy_within) >> >> endobj 449 0 obj << /D [447 0 R /XYZ -1.269 814.22 null] >> endobj 154 0 obj << /D [447 0 R /XYZ 131.571 706.118 null] >> endobj 450 0 obj << /D [447 0 R /XYZ 71.731 705.903 null] >> endobj 158 0 obj << /D [447 0 R /XYZ 203.26 666.746 null] >> endobj 451 0 obj << /D [447 0 R /XYZ 71.731 666.566 null] >> endobj 452 0 obj << /D [447 0 R /XYZ 71.731 663.697 null] >> endobj 453 0 obj << /D [447 0 R /XYZ 137.484 643.074 null] >> endobj 454 0 obj << /D [447 0 R /XYZ 71.731 627.966 null] >> endobj 455 0 obj << /D [447 0 R /XYZ 137.484 612.19 null] >> endobj 456 0 obj << /D [447 0 R /XYZ 468.211 612.19 null] >> endobj 457 0 obj << /D [447 0 R /XYZ 71.731 610.033 null] >> endobj 458 0 obj << /D [447 0 R /XYZ 147.447 594.257 null] >> endobj 459 0 obj << /D [447 0 R /XYZ 71.731 594.158 null] >> endobj 460 0 obj << /D [447 0 R /XYZ 147.447 576.325 null] >> endobj 461 0 obj << /D [447 0 R /XYZ 71.731 543.284 null] >> endobj 462 0 obj << /D [447 0 R /XYZ 137.484 527.508 null] >> endobj 463 0 obj << /D [447 0 R /XYZ 71.731 525.351 null] >> endobj 464 0 obj << /D [447 0 R /XYZ 137.484 509.575 null] >> endobj 465 0 obj << /D [447 0 R /XYZ 71.731 507.418 null] >> endobj 466 0 obj << /D [447 0 R 
/XYZ 137.484 491.642 null] >> endobj 467 0 obj << /D [447 0 R /XYZ 71.731 489.485 null] >> endobj 468 0 obj << /D [447 0 R /XYZ 137.484 473.709 null] >> endobj 469 0 obj << /D [447 0 R /XYZ 71.731 458.601 null] >> endobj 470 0 obj << /D [447 0 R /XYZ 137.484 442.825 null] >> endobj 321 0 obj << /D [447 0 R /XYZ 71.731 424.793 null] >> endobj 162 0 obj << /D [447 0 R /XYZ 312.921 385.52 null] >> endobj 471 0 obj << /D [447 0 R /XYZ 71.731 362.403 null] >> endobj 472 0 obj << /D [447 0 R /XYZ 433.223 352.643 null] >> endobj 473 0 obj << /D [447 0 R /XYZ 71.731 347.617 null] >> endobj 474 0 obj << /D [447 0 R /XYZ 137.484 328.972 null] >> endobj 475 0 obj << /D [447 0 R /XYZ 71.731 326.815 null] >> endobj 476 0 obj << /D [447 0 R /XYZ 137.484 311.039 null] >> endobj 477 0 obj << /D [447 0 R /XYZ 284.7 298.088 null] >> endobj 478 0 obj << /D [447 0 R /XYZ 157.688 272.185 null] >> endobj 479 0 obj << /D [447 0 R /XYZ 157.688 220.379 null] >> endobj 446 0 obj << /Font << /F39 188 0 R /F32 172 0 R /F35 173 0 R >> /ProcSet [ /PDF /Text ] >> endobj 483 0 obj << /Length 710 /Filter /FlateDecode >> stream x}UMo0R-HVM՞zV`dL,cdF̼y͌N`S;އ4"Q؉CFpܬOv q*DR W}> ([!e(N&TV΃ЦPLWPVmp MвTd(s` +z[+VCE1 t$:#t@Eb:<đwU^'S-(#xCe)F4? 
( p27MS2='%|Խ9[7Jy( bxI :OImqQ9lϭ0-S1,BX*qӎiۋ2i5-\h~Rt%mt=^65DjF!bm׺I=}Ylt{AX0 ?_?Oa/#={>c.|6@r-bC5G1 {IbZ{gGҮC!B6|7boV~1SqJkWxsխ5޶<xxqMWv:n-#5jyoe 'gtJt%; endstream endobj 482 0 obj << /Type /Page /Contents 483 0 R /Resources 481 0 R /MediaBox [0 0 609.714 789.041] /Parent 480 0 R >> endobj 484 0 obj << /D [482 0 R /XYZ -1.269 814.22 null] >> endobj 481 0 obj << /Font << /F39 188 0 R /F35 173 0 R /F37 179 0 R /F38 181 0 R >> /ProcSet [ /PDF /Text ] >> endobj 486 0 obj [278 333 278 278 556 556 556 556 556 556 556 556 556 556 278 278 584 584 584 556 1015 667 667 722 722 667 611 778 722 278 500 667 556 833 722 778 667 778 722 667 611 722 667 944 667 667 611 278 278 278 469 556 222 556 556 500 556 556 278 556 556 222 222 500 222 833 556 556 556 556 333 500 278 556 500 722 500 500 500 334 260 334 584] endobj 487 0 obj [420 500 500 833 778 333 333 333 500 675 250 333 250 278 500 500 500 500 500 500 500 500 500 500 333 333 675 675 675 500 920 611 611 667 722 611 611 722 722 333 444 667 556 833 667 722 611 722 611 500 556 722 611 833 611 556 556 389 278 389 422 500 333 500 500 444 500 444 278 500 500 278 278 444 278 722 500 500 500 500 389 389 278 500 444 667 444 444 389] endobj 488 0 obj [600 600 600 600 600 600 600 600 600 600 600 600 600 600 600 600 600 600 600 600 600 600 600 600 600 600 600 600 600 600 600 600 600 600 600 600 600 600 600 600 600 600 600 600 600 600 600 600 600 600 600 600 600 600 600 600 600 600 600 600 600 600 600 600 600 600 600 600 600 600 600 600 600 600 600 600 600 600 600 600 600 600 600 600 600 600 600 600 600 600 600] endobj 489 0 obj [333 250 278 500 500 500 500 500 500 500 500 500 500 333 333 570 570 570 500 930 722 667 722 722 667 611 778 778 389 500 778 667 944 722 778 611 778 722 556 667 722 722 1000 722 722 667 333 278 333 581 500 333 500 556 444 556 444 333 500 556 278 333 556 278 833 556 500 556 556 444 389 333 556] endobj 490 0 obj [556 556 167 333 611 278 333 333 0 333 564 0 611 444 333 278 0 0 0 0 0 0 0 0 0 0 0 0 333 180 250 333 
408 500 500 833 778 333 333 333 500 564 250 333 250 278 500 500 500 500 500 500 500 500 500 500 278 278 564 564 564 444 921 722 667 667 722 611 556 722 722 333 389 722 611 889 722 722 556 722 667 556 611 722 722 944 722 722 611 333 278 333 469 500 333 444 500 444 500 444 333 500 500 278 278 500 278 778 500 500 500 500 333 389 278 500 500 722 500 500 444 480 200 480 541 0 0 0 333 500 444 1000 500 500 333 1000 556 333 889 0 0 0 0 0 0 444 444 350] endobj 491 0 obj [474 556 556 889 722 278 333 333 389 584 278 333 278 278 556 556 556 556 556 556 556 556 556 556 333 333 584 584 584 611 975 722 722 722 722 667 611 778 722 278 556 722 611 833 722 778 667 778 722 667 611 722 667 944 667 667 611 333 278 333 584 556 278 556 611 556 611 556 333 611 611 278 278 556 278 889 611 611 611 611 389 556 333 611 556 778 556 556 500 389 280 389 584 0 0 0 278 556 500 1000 556 556 333 1000 667 333 1000 0 0 0 0 0 0 500 500 350 556 1000 333 1000 556 333 944 0 0 667 0 333 556 556 556 556 280 556 333 737] endobj 492 0 obj << /Length1 1612 /Length2 17056 /Length3 0 /Length 17896 /Filter /FlateDecode >> stream xڬctf].tl;m۶sV'FǶm}>ߏ5ƚU5sEFB'djo scg[:)]ldd"N@#K{;Q# 7@h L\\\d{'Ks  Y {OߝΖv/n@{[_*@ 0D%j jlci49fN/&vLK`pvXt7:8l-,NFv.{b3q5_98 hldUQT<],\l 7ijoOIu1s]e Z:;y_4\-- hndjtv Wz#W` 1ebonsK;XEo f/ #S{;) AoJ}"7H"OC<'{d16FNp#[KÆ fٙFKgqKwoeW3:X*fçjaibmOڙ'":ީR_b9{CXI1p&db?d \,:KfdWٙ؛3+*.Fvq:9U_'osA݁&K&ɧDI.N &n?vy+ϒZG\<8򛭰f|\Tr0Gěo&W00dlT;- Nz-|1vM"=K{$7Ē&6پ=k5?` ʘndYA{LuM^qT U2qx40! @//6G?]8SihVldfS$-5:5嫳ŷ܀@80]g4@i5k[f-j{+*JB~HFb#붭p=\ۙ4h ^oʛMfr!1y }!d(zu4μ; ~Wl>`f  '.*\ZWW-z :>;ŅCL-爌x6=<~S_.G͟߹C`(iu0:Ө¤׍Ԝy|r8G;1A53IJ49 ft0z]OAiY꒸eAs!t͝#QOky P$9)5D@j1DnS><Ihw>Cnڸ V>R}i}hGav*S-I]cAV? lx# ޥGeNs!@׋"W R}5:ݑP^C+yp" $A>7$L7{o1xzgqcIneF*+!@sa%xeZ1<,G? ä11t= !( x/I@biu7 /vʸ!XQS|n@Vڢ ~6r[@'c{MLVT[-dӽzK(? &b-ȳx_«"26s j+;C2.t0y=dwY}M29x0.*" EBYbʦ!pvLЪ:/H<+UZk/v05cFlU6.f헔{cagS}23Cn,NNnHmmC5/`b@+Xȥ4s@H&_u'*R^4i$tn`V[D>vA x PcY1^R[X! 
?ZpeaRmC4l߹9U{ Ad~l]T& +Qe|+NK&1/o]({fbae|B9Z%!Jbw=#㳍y2j|֜2]=D,dC`ØP bCުB>PlNIKQ)& +xT}T`fݬ{::GY}wxqD;i }8M/a}؂Nwc〷΄۩%ӊ֟83x4'>B/2yެ/VxlZ4.3uܻl&П k}RŘ6.Ofvh"Nt[?63[{lj?'l˟)! a u(ؾ^JNE{Wbb'cX41IW5ȍ찴v~}ְddޗ7\ga~%#˖FoS90jfH-ޔMP +=(&~#1`/45H/:N(C`(O.%/yztG"iUn;=v靚:u#,nLx(}G)`z|_eZ@V5CdH#=Gop:>4Kp$-4WC~dyʛV}B}TFOi†щH?hϭ '~ .6 D#Q;c{"Y4kEL?yjB l'-NT-(Ц/%UUI<#ϊ| B+n/2a(;R1]aIҪm%1o8h3MkcwٯPI̤ bM|+Eb*0?"G@hUZ;f>{'"8cE.  (CH[LRÚ;d% wfެՙA-0IeiU9 2קΈC6O'L,8ߑ2@l,UNp"K0*eD}xZ#'yE 9@Qyy'vhEBT`FYkmUqq|aj" $ R2uJ+ǁXM}JR1/2#oOwbRm|lQ{ mB2^T7oh׆ް=jw~ٮ&_ K{a?ͼknh?= os9U|iLh/0>t?RZĨZ'zl0FΣ8"N by)?*l3iًc([c-n:ogo*SNhY(Ԇ29b˛ݓ)O5']DE\a&grN{q%^r=ނk!k#dsGOQ֥z?a"xPDQCao:Wb$wk#&TD1ZCRkX̱Y3] ' *0#p͹wR`}0pCtJH_UMGyu/;wUR+ Z2Xw{}~FHVɺ!`b7YO0y:>C|޲ 5jJ9W'q"; ywuvp0E+H%צo&V8+~#ih +:͊(J}]fӐlaYyo>K{;ERh;Qw2=%'A9`ezF$d\\?=HDϺPhn8#Щnq5] v)eQTI6݇zUC %=(ܼP0z`:NETj@-7-׎ݚ];2df 駟@L.VR R?l| ud$`4V&fك^br}K+ϖmoRuWhGޥF9:/볱0K?p@:gSBJ(%='ŧ~M T `{Z㧗Ɋ0{SM/QPՕ-6z!֤F^|':ƶ Jfcg]po!\)/c }xY*$SoƈIj 2o&BİM@{+u̬Y?Zo/墲&]gꪊ?a1jr'~Iսx-. >Ny"@{iBV Qf WS>۔Zw^ 'R>C_qv]%ȫd'Fq5W!zrA V9;͊Q/5mq+xzRpڷv'BXO2045ckQQ&F{|PA7vTw^ {!goMs:|x^1*S="(Ch? 
(8[B "fYlPove{1 \0ݵ#K'nVsӘ'\U64z?I9v:A]8+OۥRS\@[m'I.vQ-c6 :Dn3#΀[W8UM"_)6c[+d갶p&nLƕw9 E݃R>_h$j(|Ӄ'*߳ `MGCK`Fn&jHݍh?"oOB^Wi+>D28eMKMdjN.v̠\!Z$) ̡$Q2O\^g)CEg™"ZYj [g ,m~ 9[ȧ{LR  3%O6r!J8sެ1O[b_GCľ 4H#*}Gט*-*Ζ8G,Pkͥh#K:l];%e`Dؾ&<.۶z@7k6D{.> ;AN<1cd?ẓxtv#ڗ/U렗j-;dW2|Qj"ҕ|L\96ҬA5ȕ D<ׄAƏtW㳀Vq3:+f5E[mx:pi1a_4u#$K9$U &zFhlv-B3UKȐ$ܑ$ӓT.Z D6MƐTVeY)Ht`=/[Aط+梅4i3.ܷa 1~E0 ˭I ލ ݍ 0y49[*D8,A{}an⿧R8{w{(q8Ek 2NT2]OU1n$#JU̧R8z/cvԯ1N)+\*g4-d|+R͋h>¬ 9?-;> dwW Mϙ#̜3Fn/ 3ۥ,eaPb`qx%؏_"< + )(D,a8/gGKBܶt5\aboLyL=jN{\^f؝ <٤&vTq^u8z?ZS;:-7ޭ2.8qp~W;ݩ{s$?Kc4WZX ?a/KV`z (0(P{f mOݜ9y3jrZ\;R;XZCEt(|YQݪ2iKd8iYg({8Uq F$͊HR }wȩ#u,vmb $ ' As[ MO^]J8q%eźTV*O.wQ_+Vx|('"HwxKV3rOa~[F켞Gs̏t+hN[rM.Vm#+!jk#fwbkw+Ac=.J*D-4F"3*:pJU?rg3o,8ե3Yί (M5z~/֖%JCq^%C型1/{{&AYMݿküSZ3HYӡgZ솕s-J9ʘ{u|urr9[?{ft,2D -H!ꉢ H |=_ ZfH;Q2.cY3]T\fkGR$LeT0_fdB*:F"%W /}0lˣA%0*Jw{1.j-G0y0)uX`=5ez\ "- {jccA,Ӭ~$Yc&#P `;(WJ}AF1MqLӉjÒu-Zz;d4GE ѽp%cܪ$=U"oynuӻ}*fh$nW)B,MݵJOxW.>XAkS.}E6jfVBM3 /(5R u-_l'0ބ=vLg/:Z8 {"ٻHDNP %6y}fSc&E8N0"Ū #pu|6Jpi<׸8Y\] )ݬmE)mGOe?孰ؤa6q2R]|'?cnVbkF Ĵ"@Ԛ7+3 tˤYNۏ} A_,D= iR*r`YQĈcx˙v`$[2%fdhkFulW*yF6+/:ȷ-ԋX S> ”gXL|7(.^xoU.U:A$!!5MOEtQ;m 1LA\LȤmwUihP.gʸ5XnAT?XQ apO!UZe3Eqnö]FK$B5]Ȫ弐|\b>F9ƵdܹO&T:x6VSP;(ߍBХq8iI_Ii .А|Ձ<)ތl2 Ub \ `h D_xnP$kaX\ȖL2J鶕mI|%␽!QV> RnD*L{%3}|prN Khsqa|߽Gt7>4o[.q21 $GfXHjl2ެox|&=#.v-d:U@&1/autl~ %>i?WOҥ뇽0; B *?jGaP@1|PŨ0F>#ha1@KkRf!^pvGPعx:j9ϩ|KQrj4lHC>^ X}.!,sC0_? D+1!PӪFۦh3uZo.ۡCTaZiEŸHT~G?բwwDM9OC AxubM>qM Vp8\B׌%w X3bS˪z!?7~2Q$g =EmݽMQ;Sd]5ILzo5|k&3QOI2m6.+27Čت~ C{c~N]zwi#Mtȯ컇H_^`sj \.>(ȧc/Y;* Dc#AaeJ>f3 aQm$eud+=NzYRSҞHDPڝꐚJZ8#헝 h )}7.$CWmN ( 6b] DROIp| A03bYˉ?}V:BcbtJ8 !c j\e*"D uaM^*nY߳!F;sΥ.Je*8J?*{#wy7^ |(# XDi oquZsP~irHCݢ2EkX['Q܅ǟs\QoiRяN9:G|W1Κ0 I9Ʒ3|6vx=-i-xR5N p"C6m임)+do+REqc(&fi"fFL[rzrs_ݣ)j#Y Re%̬ |.sĨCJGxjVS3)VU[優Xg[z8?^cּ:fM (h/SibthUpVכQ~0M?TDle2KgPF)NrN8rϢ} MWcߋkwDVm'8=$tb0ե: 3zƋ, Ib>scU>yK8@L75ٜUcC8R:|N)! ޻,{`/5gޥWO{uȊ;)qg;ŦuRh,ث~%U?7{?Qyi?6NEQ#v_pްUu[R GTzRğӦoT,-vZE-<0k-b!Օ6C?AKS5hT?@ G)(c\ @Z>8#).L*]#]o]0~ 譪wW .:P baYkk5V]c^7!) 
)[-tk%w֘r#:=e/y;̰LM?h]rnh#dpSp;6ױ7j';a'{ -Qy{ =8hz~sߟņXhpY~&ǀ{i˳9w?Вj>#4֙+tu'D:cO -T!W_E\rXGJ<KI] |}+Dio6kֵַ:ʫJ _ioJ+(4_h 9O~a/!/Ulɐ>l8~_0u,(Z:Z)u^2pp*^ySEON[2^v3G=˸lC/*ƅ? Ghn0tFSޅU% 6TL61Z}9A`G͐h&9w[hMCK.7Q#6<}KOO{[,׾ɩB"6%YVܥ]{ì^l5(_s "ڑ!Ѽh|N% 5u.{s=pS|/ν(t(pMxUa \x:c]lqw'MN^ި@8dЀ AS)鏷0nM&\2*|P[E?"x><@mƞSJT)5( %-xS+2MAkݶ>1x3 ,nҬm'-R1_+"Bj׃|CZ/y WO> H`U6OpItx V,&g7q M4S3޴k}ƭ4(-Wck喩YyL 񙬔3B\lm`6il^WO/`rʹ&oV w'pо.kyB{ ̆rf#)Y֩G}_#|KKbddŦ]&̸/32Ƣr+\ڵj֮Gwa<Uv}wא΋kSQ=I9d̢SbPa_pN™!z!3`ުLR}["8W)5R/I1ENr=}=H=" 5ʽ)uBsK"ONwHUIKBXՔG2<0LTa|5J9˧o CIVM u3NhTy]σi['v+h|wjQo2˨z۽t?v{jɑֽku-PL_mKsqXU2whadѰw=u$W [7";R 9|ŒD6L64%LICa-%Y؃B+|D.g Y9iaj x״GbF屜DbkP%Y#$1]6$Tt_Jw +.dެ9PN#I;Z1}v,l nrkǃХ<[\gqՠ2Fuc)Ep Omr幼ngx\,':ޥ \4N>7j۝=Tmԑ#wPqok7xAWp- Ѹ_92"Տo,2GH :V,H }!rkI7^(c;&*?GYr<Ӂ91Y)c5қS6T'~ȜE(d?h7ѡ?t*iy6*q.e<ړ5or~?"o"Oueg4ivBi^1MV!֬7{=̎6Dctp=w ToQjِen^}'*G-'XmY'-l{SuW-7'h'^r.sd,T$2+bl@qջDXO8*HH8͸XR(O,:C?A'B$Ws qN٧jO[u+"[A왈3F]9V+B'G2Xi7IegX$gJk>D4IɫXZ;%2Emx#`{:5^Swxk+\`j+V0b=s,n|FE#aPkr8nf  bk<ž?"^W*#(>=kUQ|#I+ܵ+?VZ-^$ zfZFDhzE0.ٌ-78fi?xB(ى}R^X}mbL^݁Ȩ1gc!2qnF0ˀN,?g(y,A묦$3Uk] $;ڞDsBk8ZC`[ӝW'Dƒ*DOЩb?6<>)J}rEVV R_hs\2Q=n;J9!^JG_oq4(i]09FdB/:ր7{!AsQFHta 㜷K]8˖'vsشS5>qլ;g7xwGHt-# (<-ܤmRJen9\gkt!8T* d<fEo b}ĘpĔ[6}P~õADžCĬ{HC1kzsqAmWԜv`ZZƬ~|6N$Vt MH7333۰o|3UM,#m[l3doD@mM_\ ᾭݴ#y9wS/W+v5aj?]E@2\r_pÙ)N?E/0|X^Cb~ g&)j ޟ gqfs4GXNÞj'l1BBV0g  Wbd-$V6i0-(Fܚ/a Caָ{>4$&gW,ło9 * u8*RNK/P5>T~ 5Ϛf(hcq(k͇mЂU@25Cn Q'ǖ7G7CnzCVuhB3#zFؑCO5ָ9, POH[B5ILѕ4!tg> /ǰ~ڈroj !I swšB 0ƥEOStDM_2f@޽]G>;ץ70pevY.iV@KxkMΚ7RM9曝 >ؔ4xc4:"M=r 7D NNJ6)#!P*6E:'oS/&; 5zOvY t ֿ ?*hLkToV4%52j@[ -N#uG# w^.$2{"ѠM?Rkd;up39)TLMirCʹ&eV?G^Ec3qL銃T FimN zVA;+YNE(+ﳝfiD~uk$l6*jkNW[ =W*Ż ӓY$ sz'|@=S*=PyĪZ3!h!5 ^|rxǁ>՚A(LR/}hұWFwKpildf$CfoRaYp݊ݮWG͠xQE5*px* mżEXaBRJrQ8m̷dR- d$WT cŜXW勃g)y{;5^U=W^2-xa)zfK8Ed#mkZ* .zBIPʫTY\pufڈCR7p'#E$. 
B)kUM.orH֥0^ZE/@`Q|ag$WFr&m^)h_hB endstream endobj 493 0 obj << /Type /FontDescriptor /FontName /FSPRBE+NimbusMonL-Regu /Flags 4 /FontBBox [-12 -237 650 811] /Ascent 625 /CapHeight 557 /Descent -147 /ItalicAngle 0 /StemV 41 /XHeight 426 /CharSet (/A/B/C/D/E/F/G/H/I/K/L/M/N/O/P/R/S/T/U/W/Y/a/ampersand/b/backslash/braceleft/braceright/bracketleft/bracketright/c/colon/comma/d/dollar/e/eight/equal/f/five/four/g/h/hyphen/i/k/l/m/n/nine/numbersign/o/one/p/parenleft/parenright/percent/period/plus/quoteleft/quoteright/r/s/semicolon/seven/six/slash/t/three/two/u/underscore/v/w/y/z/zero) /FontFile 492 0 R >> endobj 494 0 obj << /Length1 1608 /Length2 10493 /Length3 0 /Length 11315 /Filter /FlateDecode >> stream xڭteTݒu!Kww Fk܂;www`!X\>;~kf~|֧".يmдlԐqheipDɎ3œ4 p34mD,Xe{/BcͬsUN#O4d"s1Oc^ޤMCh{=βմV q|P~)ś_&3> '&TV{>Ft0ud@Kr\Hl3 PƔ{CSu;noSla[ǒoBXJcFUF/iw=o<z~;7qED^x:.XP_^:5Y SP!^/WⰋvN$Bj >gηo p )} T/HN4w<:4ٲ0K3CeX藀g̔ďF=x \n6fr)kf%b/lC.K%5)Y%kŇCr)E*{SEۣ*7<cJQǻ㞍R3!0#Z~,  }"CGyW#Yq4ܺ;fm҉ʅ]K1I-%F Um[urqM:A=Ŝ&FT:j3o^YhC@QuSڋp{H,(Ws}ߧN&NkS̯_ /~ v h)19{Y؜eUD,F퇤\| Uӂ-DZz P5Uk aV1APU@NUHX QxG7*K'αqL'tE,7[M5h2'Em;(?!xs`^]1Zޢ.$G,H0/LX\.#qЎ|i}Adxe |]%zl$[85[ xZ3lԺJgPuxMF OJԁ9%JMyMTP+R ԰ p:|vHK2Zn$tzFDil(Na ː/~@ofHNămVs%AIUObãV)3(B)SяY:Lf9#g'0.U'ԏL.A*}e~ 0i^n6o۶P eK @ݮtQ3ntxl a{@!d6h55>JVO\hOe(R0MK@":ӊ|-o l,ε.RK@f,u n`# 7=` d uYx|j"wx)d9}\A\:WۥifX`E|^1vz; r_#g%*cJ~}oTEDY ,W ~:C>v۰\;D~YZ #Gۍ&eu(UT.['4Ɲ k&ł;e,}bigt8YUzTq0jZ]}dBt˄RT5jq: QmШs_l?)>2 qL~kI; K_Nh>vOB6>uώƾg Uyitmw1C|UـhDzqex"qLY$ S,cWDf﹎w%E`yE/xiS-@8F1RJԷqKz}4bpGjg]Rv֩h},P9P:HD~=^?|/_g"{YΔ}nbZS31abf /4߶/"Z'S&L|Ϳ?l*Ekݪz=wQKWj )aSy4U,ԣߕ(7PIWTi> 2Vcp%Z+ikuɎ].uvp!۵T,}@,։g;mm:C\G7G ݽ0ӑzɱ@zt,t( ^(]KYdת0]bʄ%Ib4"[/|.ADOYԴ3GGq?52k ݙS&yai:3 |{4>jݯEj΄jbt)I}PȓeN/pn VYf;r>맕hݦ1EWn-Rֽq3L1C9FBrO/ahyϳh$&q,D%+mU$e;H  Yٓq!yNC@T|n[mBPI\^d +@f(CڋMC83XIdq/WG9FQtG!7Y=W&+M:fYSS=:tF,Ֆ8Ld[%Y<`Ȳ|P=yWi%į^e~)mGhaN@\ 5<:WN;r]ۧd e/xLx4&wvAtf:&I~{W@_S6^ j7v[46DsrMbiщpeNF&R.~"џ5m`᭜¢rǩ< kq%o-yt(|#4R5*40i uUrK~%нrPK'JYtiGЇҰ&> Z)q ߇?e9Ī*1,GKG2 OnaBW 
.ǦJ2P@,3xCҶN)TԬ.:h9@b~k: guՔQX3 G QUIT,`(m 0d|-W; #E:ˏ72ʦSdC/B ĤU +tVI 4V8 |<}'6g.V!}ۀ*Eb~RV=05#c.蔏`G"ZҠuec< $.rj%VW3Aj"c菗 R ͗JZijYH5>g6c7:FuռGE9S>d\M=JY2i$c>Vx"Y [&km!ׁMS[ '7/5&!1P{ݵ/,H#;RVTnvxNG¡UG.L5zcDKq22B5\ I|Ɇc2FfWPQ~V:c|ںbFUڌ/`m(I{=%Z1BY:WR(2H1^+BnFflf\%X TS]fye C!mA"<=9%mCm;%K\ۮ>1ˤgU'mz* %傛FFw-.\e?sud4̟j w= 48V=!Gtv~q/3D"")hbg'˵L2\}Q{"Xmӱ^.[3SvR \p1byzBn$~ WգFݢeUryBO:MjrePH\kM5 /~ Qs9^ _s7x`c 8\4sVPvj9 /DEY?CXPѶj0ȥ5%9XBO˷8  wt&D(kaA^)42? D.qV kܗ FSs3Qyf-ϬËPIqRʞ0dn`wSќ@]I Aŧk|hćOfJ;cn~n,L=?T7|BвlT@+;G)eCCk)S/-!lLH{zu5xV JajB`C5`;蟗zzMz~8`L1 UqW"$h+4\`;Z͞Pqy#y|@iQ9y| GE ̈@!aYv/3Bх`h"LG Ǜny'7Qq,)1 ,fϐ~,ަks=nQt4 T uz"燠Z N*od*Gc TlLzڽi [Yq|-v,c[;}^G,o;-iUt9 }Qb(e Qs @L1ޜu?s=2oˬ0 `7<.- m9Hc5R#Y‰WTC$_q|+yw^ȉUCE: q9GU5(_ <)oLu0>sYnRGKc0ypc<*(V^ eb/#6'~av WPc]n߁{5=їEuh'K)nÞ%LQ,K?6d"܅ܖ%ʯ-03eL'GMq~%Q˜va(hNSKkbg{g-|]luRQ켲ԯb(} sZR8?f?T]OH_t8]`al=:Ur[xX*ه 1>2А^ `>~6`f8wUrJᷳ!O8 C#\XH|z3СwB nW󄺕(T [K佒*ȷZП'N[(enbIJCZW*[5pVXq;"'f j1V?X2(/ȵ߭DRRKgw#e'˩p3'WOQk|C4zI,8yqYKdKNXm?b"I:J}Vf*u_v1;>|71v*֕ 2{GI&?[QӹPJ:ϙ̡y&u!?y3 /ٸ|KM;V:jp?4x7B? V['<(bʛ# Ƶ/p<`|u!8&;-Qq^#:^Ro֣Hn7apexExGdFtV!4wy8 R}{SʦhgJ O~d,(1ꚺ~Bס,zyԴI3K{NPYU3&k/+|èdT"THg>'tCC{-1YW| Q1:#rvJ|7g.4Xbz(FaM] qF0A/%r󢀔c} ^eĶ8A]NXt@37PȵO }CRF|ОSSf;_/Ѽ,N(Wyy8ʢWT!Ytjq0x]k0Cґ0鹽tz#_J&"6\Ny㥯BiV|B|Ρ$L,@<ԼBę+%4v_ 9sx~-$Ɔƛ\dg S[~\XZ`ǝ!ԿQªnIԷ]pwh%o}gJlh>˂W،\.YM~v*YZ? 
5 ݩ:Ʌ?,I"dlƁ.4ꓡ>sof5ޅª 7#mmgȡa5<Ki%m9눁Vv5S2"?;:&sVCGK˓tx w<:Z0ez1W ?+T SikY=DE P?` =b%m9Cȁdu%"Ԉ9b^k m[lݯt,{lH+ho?zl7]6K?{U`6HȐ0 ΓZ/4ŝB{F6M?4E7i|ǟZUޱ3Ddz:XFP\$(p{'++9sw%Oe̢$+c|E%uP䂞B.+ӮeQ ;dʙG41In'l=TʘHdwEl'AcF=vi'9.pZCFwUcoeL&xxqObXh?kYTP0.V}MCItS Jy  "Z]erV$mʜ:XكUGka_{h^sM }|bpjQ8,-r*߹OIُ?ma}ʵjӾfnK% NvS{6h~;uA`j) '?a~Bm6C;p1fn2ܙ'=~[ |&GrחnruBK-;FP?aj|ܬ$!C<a- ȥwo_ܘFxpV B}TkSi"?b!dŽBc ϢUv6 5Pp\Y>ҥVx{cXDUKCSWZȲ!xO*˯}bA@Ekz^N4I6AiNNmyCfXoN~Z`+' edGMܝk`}CfIm蘩yp7ͷ4O";sӁȡ0fpkԼ:Xβɢ~B(rh|KZx/ :3rZ:,BlKoV%{S&Q\jtVf r6dy"o]9~=F!ȜSIon*HZ׺mT`XϢi99KuQ,,|QBclcCe?G ]0E+ TW՚gÜ޸)'o} 7ݚKZ|br7XmTAYE(H!6+'~Gc >`7vaۭN?z])a"{]7i˺x-+ "8ભ8NZassVdSlg'DH5_̓6dx"*!+b>1!0S;34"2Zf$Sc 5rԭ׮zaOI@:cj1AR.gM8]@K[ ^r竚v Z[rwRn[ӏvG#M2HF+rS=1M鵁0MC9JZf ů.Ҹf,L3 ۛQY&$026{Tmݾg>^쮄`n1:2/4f j9n3diw장<5ײ .G=[ሓ;4,Z< z*7 an* endstream endobj 495 0 obj << /Type /FontDescriptor /FontName /GPWYBO+NimbusSanL-Bold /Flags 4 /FontBBox [-173 -307 1003 949] /Ascent 722 /CapHeight 722 /Descent -217 /ItalicAngle 0 /StemV 141 /XHeight 532 /CharSet (/A/C/G/I/M/N/O/P/R/S/T/W/a/b/c/colon/comma/copyright/d/e/eight/equal/f/five/four/g/h/hyphen/i/k/l/m/n/nine/o/one/p/parenleft/parenright/period/quotedbl/r/s/seven/six/slash/t/three/two/u/underscore/v/w/x/y/z/zero) /FontFile 494 0 R >> endobj 496 0 obj << /Length1 1166 /Length2 5787 /Length3 0 /Length 6555 /Filter /FlateDecode >> stream xuSu\ۺ&RSZcAA R@BI{}=zwg3+vU8 /$@<= @0-'GO-("`g7\!AJHÔA[ B!$P\RLv +!np?ABa2 Cx"P xHn;Ϫ%8 utBpIHH|b# q+ݦP ۦc@*`(.' 
!)(pAn1%}ۨ wfP$v(_{vD !(P(@{'% }?H0G${  P{x{ g!A($`߿VV \}A Zy9)nrQBL_:ꁠ'08@qnuk$/֛?l :p:@1knBRWcp)V" ܠ?M ɣB` ({??!DBbB .0]AA`TP#uJ/7mDݞkm|K97)y󯏊>4:d%s\ѡv)}y,7$[c,N%b.y8zwFiI<^ڤXoJ̕'fJ#d g[8t[3Bt]%q;ΡwLs>6vzOH =Lp`GgVHsKS|'[|͟ LP$1,SSiI}ǣ˜.$WzD/R{j}Y]8k혃;t {Owi<"}|Wf!3mY|ӳM6Dg/qyBƲsH5C!P8!y@5P,Ael_&o?dg}ׯd6cr#7öu}|tVGJ}}|fq,KS&/NE(yRF8ꔧ-?FX2JC'y>:rM nIE֖ O@Ys}cֻ$+s9t MYe\Pmf15/G/_=(v;}؃f>ŶH1f,xI om=%N2mLKV5/O(vE,U@rZA·^EL7qy@4Z$~]} ַ2ݚq;3uwMsDR ?t5  m*7vgiy~ ݉*"ʅDŽ>%!4턽'r݋T\vЪ,OM{\,ȴws y;grXHbDaEvW {r.yy05Xf?ω3ޡ+Z1)P+g{D%Λ ǕZrh̏iEGԥ xswfU9}!SU`ó2W؉v܌l˕ 8XZ MSBE$ ^OW}~@9QԷ^07roJ.igKhr@ `3U"[Zs|l_39TAZwYg i j)T57ҢPȺb̫]Y*NE~2aY]El/lDbFTp)/wliiYY'+f𱛢XF B_w..sg9c1wLbE[wXWer r?j#Fw7r2˗ rG= UK`WD2SY \RBIoD94~6^8c!w\~`ܵ4e#vKrQ69bB- }ae*aIb$"'=5.j`RVKRӓy;+e5)i}bZ`^Sӄ7t~ 4hu5yTV`Ngxs` o.e\3eg5EL96rqip!]N~Tw' _yi>qLG?pm}ld,HY";uAjPAv$6͑mJr Z;{ u LfdK?|1P}eA%|&nLljizPAQ. !?&՜AEXsZA1_jpz^LǮ|wӦM0+6˧XWTU_Q]2Q]gئhp9*lʬǫ|TY25*.]:Ƞ^jn D;T?|Ahd~aXN]4"Aw቏<\HC07RH0y0\RFe>:9y:ijT:9)P*ꯃߎЖu~%(U-! M8,jw1}ҁ,gso9CtcBQSIL+Da~2JT,Fwh茱}QZO_ qlfץ=`M#x˚%-6D xqNgʪ+s",6=830_kCӇ$t?X1{¼67TɣZQA8=]8N4E{&2i| ^lwAxan$)W*d+J=eLȟWs3 ڈP2.L´#.EXSkC甅"!BcfrW!+t9Cu-Eպq-X. O]kBd1“` 7OR(xe2qKDWNJ;īel?Z!FkjVF/*r R33d߇m:vĂ$? 
`DHl8|Ŕr{i _, u1TzmGk{sr(ĝb"|Vd|8gVc+PA\KUjSSX׵ tnZL0`<h9 Uh@Yj>܉1M 8NKɗhrpѤnqHoq(6q y?s 5!1ORo5lfIH:R-T;T 8!x*)TlJ0skw,E~:t= Pza:F<%IFRi FFPS$ߥIhwG3 ϲ2NMu"h}-}ݢDioFR";/<2UTƅ1ֹIĴ:#men|J:n.O\c!IZ(C.N.k;<'7o^';K|z|]1XUB\䏃18d,Vehc0pv|TaKTՒlVOGoΏh)ʐJUJ=?| >Ecr[,C]]=>#weh<6/tfʟp\GDӆ1V~YXY۾3r]W3=<< .kMf k@Y*~"[3Nne&.ow;zcrNLaWsU{k}(6>LZkuUzc:7gRamPs#Ð`AMbR ;po9X"p`$!k;)ހ"(,B]@5UgiEyΙϪl:\knP $\( е0,_ .1orMS!*RB9W/4"[ѕDYi,Kl]"=yPxkͅԏ-BmP[g+2t- iMbϢk/֤bőVcd8N\zq0mcpQݤ^K3iZ1%)[0/tNC٧v^D!c͒tGFl鿴ekUj>n W endstream endobj 497 0 obj << /Type /FontDescriptor /FontName /HAILJB+NimbusSanL-Regu /Flags 4 /FontBBox [-174 -285 1001 953] /Ascent 712 /CapHeight 712 /Descent -213 /ItalicAngle 0 /StemV 85 /XHeight 523 /CharSet (/B/I/M/S/a/asciitilde/b/c/colon/comma/d/e/f/five/four/h/i/k/l/m/n/o/p/period/r/s/six/slash/t/three/u/w/x/y/zero) /FontFile 496 0 R >> endobj 498 0 obj << /Length1 1626 /Length2 8185 /Length3 0 /Length 9019 /Filter /FlateDecode >> stream xڭVuTKK;[:b[nA%$w9w}=Zxwt``+ Ѐ8Zj5`"j\`+.$C`P9 8X`ȁA~~@X 888e1b0?y v99Gs ۂ0@VSHYCPC.-7K`65A [s~ĒvX\ cv8]!W88 ܬ~x[rr=F8>`pW x%OwmWȣ~~[@8%`qur|CN {:_prr O+&X+Pk/?|.{gIXX+5 XS'Ay]r n x|d,wи9 Gh_le`V)-G" y/#U҂Ak yA.(Q?#pͧk C ;Gҗ1?ZKtzìy ##xs ?p =޽GB"¼C?@|:[] Ǿyt^ F Y^qi< ?v= yH]Jz*,{Io7`SQnA^@%?%l]*~BsnK}{ԁ=(ڗ'`viN7HY5ca^_hޢ=hp8b `x@xJ!iǯG ?Ɯwu?248g#+Î/.ir^G~ AoXeQ;Ϊ5Mݺb?\U噣z~ЫHBnU{5(MsObJPb(#;XxxQUʫYtGE)V %}*ҸLێw#ʼ9D1J|8ga8(7qД,Q/^'ÖC)9hRbql#떢d.oWE*'azbM@n7<)ZyR=,SpB0qH{' .2ད>-pcY< L0uVmBi![!B aJN9LN٧!Qeߨ jB rvt<݀p"zMwŤ}m܈:12rSNMȓqHMkKyCL1MWbS&^)y6W\*XQEq+vK@ ^~t=hv}Xݬ]"7>kɈZ~$`lG`@6}G-{ٽu0SrcZyŋ+G\:W̰AP/hu"9d$xqYB,b̟?,pJW\}dj³TIw!+i&pd)ܙ\"'=lwXqMOE~HLmÞHt8@'-\ڪihP@hTei 챭kEAEflk:ƸAS'u??/ӇdDXDVbicИ#FS|!%|;4Kx9VO0c}IpZ ɏr 6 [Tm8ߙNRl¸Q6E 1 <"]Tvy5 DqXvZPbDʳ2J(I|ެgحz @Ƃ1A*L|H;ihOxS3OH#Ӹg2MEH,4)]I MXZȎϴf eP g9p1DP$ͨk'2xR;QFF0 j@0Qy{' ^,fv?.zc,K(ڮm"?@@h̝ĸ,Q X Eg!"K3`YVu uH|qbY o{ =󣦇׼F$O=ZǞ 4Jo VZ^..`Tabg.o@Øi|]b WpQxɂl֓fJv[0Y953$Ԙ{it8Y_VQj!WXDj.g|j_›x6\I }I,dJd kqW{r1noyz~p\0nTs8|񣁺ЄLvl:mKMHceTEV "-+WjO)IZ RZ z@%œ>7/TpW.M^q4Wv ŷWJ 
:5),FĚLZqt,g.&N@ 'ز]̓9CŊ('LO;FYG}QXCڽ"i֩7]:qGt?wmR#ƸÇ8udM͖Djgp@^@O'-aq,E>!Y+@btWqq뎛Fܘ.6߲ OC'Tڈ9P9Ɗdg+q7ɃmeH(0IU];Epɵbs|†=?A*K{}G;kq"2{΢dlHWQ|^};f2qdܜ*[̤lSw0D>]@PP^azt)%@ZoDC͢תZ׮k*'GX㵊qʙ?uVvR +fN rqo^1]s%$?(MEŖeM9VbU=aHUCoXq묳9q3p&ܮy\+CaSj~h (.9K40Dɓ)慢2v{cu$ Ez`JMo[짷}|{Y}^L*ýWN^`ϝՌp}N\VWq~ovn|oM[M`?o=YC$@>{oJUIj}ֻ'vA$\o]Wʛ)?I軭&|>uuV} x^8*/eZnVG| U VNܗW_t&rsZ%rblkߠA꯹j}H#}q \AxI`}QKB[heWߍ~a$.%Rw7|S]Ρnh%̫ȗ"ר?0=l~gEoJQFǍM+M$;dy f Y>WUj{#ak$׃Z@q ^D3$;`rs u3ԏ:rw)[ʇ]Qy`gn8?-@AHEeyjVxm)onRG9qXOh_2 _RzNZВQ7hZu )Qo,_)S|$q}9/ ؗ8 6I sUS'IcriMY)ŸE4*,tZlvY 6<@Wv2oR}F,S&2zA)nF!eJ Ò&,tm5'EzNvKרh}FgɈ`ߓFEkqid'yN^p3oz/!Y"֬+ٺNst>4x^C]ja"uX@ΡWȒgV., ' ZP Xy\kFWT ;J| ւmr6)AҪ.db7SSDpdStUx/- zce|46NVBMgwwBJM'$KC^CuTTua۲"YT9_o,NݺuTb) \'ɞW(,[֣}]}ÅT m>%‚RҝTqA^ e]W,TwW; _9+SB¨I:l=%^b#O*^:˝e;oޓYh ֮ NsQuݪّ̓sn, …Jt-6/qTYQmZpןn#|@LvV*Kb+P_}q@bRi1l&Ii(h-`@1U\`оID<Q$:4INmgQdԔV3u4ZG."e An:3r5<}y/9㝩_WC]'NwGGF3RS@V,$= v ~ac*1TQ̽x(J$5QSӘlҜ] ) ;Yh"hP2}iɦHi6%}W0(}e Ѣ[4ruDwA\ڭY(#5\]yO/03Gr:h; &g ]1@nPA}O\P ZϞŽ͜(EX4b'VZbx?S~0QJ$_r/u(i1> L z(/8tz_P` gGM.*!:uT(m@Qϫ#~TeC̐O(/Cp #f D}mcsJ{#]G{^_͆|Q3H"؟C$UK5 'Om]PU2Yh)vq' ^ս^%xg!յ jj /wɐ1J vF"s{#Zl}Cb2T]Ae S1kL)6UgL!& 9[;Z?%\A HLGD?W+FVH3=+ Yv5RkakvbYwwʽ]fgÇP*'buιy:3Ct#&Q/yާLt<ޣ3E^ maME|_nVr{}GEijWfnC=o(0-M'3V%M5 F!OMk"[&1qpfvIHmL=kW]̒QY~(H]tz%RH$<&A @Eson=uJ-,9߈Y]sZ7oB5NJobR,BF tnh~Pr/,"M,4sJŜD`\/f :Bd}$̖97wsHHVJ>:O;lISìjlRXp;,Mxr>V> ޺yZԿ'`N?"j)ͩ([gL0ˠh6/}ng'Аz~3#Zt6]C_x28han)tۅ9u2V|BL.HխY"E,ht_55+6j)kCr8ch~y2"xodvLDI3mnF0OhQJ7D'AĂWDf%li^& hMbR:6zo>W]Wj/"]ln@N+w..d,oS)}5P- W5=lTR,*F˔@W/TqfE|fGkKC(C(ȃ ߴNvX~hd+wQ3byoD~()^.ޕJ^ݖ~y3޸km7i$!}ÙGٟ_ѾSf~Q3bDŽ#< Tɡj/ksZ_HFiTv*N` MTV, 坂l;cj=Bc'x8_+5RIo܍YгGY"^sڽɐ-P+&4-P'zzpvcB0WI#ܩ3(Kn>̭Z_~ @aLCg!2'8O4=p]"@b"Opsg"oݖ2U/2>*`y0I[dk-sgSyMc,Eb`so!Ze-bqdG,LPhR?酙x-m endstream endobj 499 0 obj << /Type /FontDescriptor /FontName /PVAPSW+NimbusRomNo9L-Medi /Flags 4 /FontBBox [-168 -341 1000 960] /Ascent 690 /CapHeight 690 /Descent -209 /ItalicAngle 0 /StemV 140 /XHeight 461 /CharSet 
(/a/b/c/d/e/equal/f/g/h/hyphen/i/k/l/m/n/o/p/r/s/t/u/underscore) /FontFile 498 0 R >> endobj 500 0 obj << /Length1 1630 /Length2 19129 /Length3 0 /Length 19977 /Filter /FlateDecode >> stream xڬcp}&vvl۶:ٱctl۶;V8o3gyg̜t-\뷪6+3+?@E^ّOI Wυ@E%27qvt4q5蘛$́vv@ dmi Rסc``O?.SttPp7st7wp anp2XXۙ$TTe2Zss@P;,A ?0s\́=NN {kk% \@;7 pWAN ǿmT]\] k'W߬.Gf@Z/_ \3k';9UV[]\g:'_ю5XY0# t]sp[o6wsпD- `fn7%e>(o!7r+G#}ZN;PshL@{k;S1w09WcsK +3뿕.֞f֮@+ߙK`fv0 `bce/6M+k?$pd`_;K׿ge9]WM/n'-&忀SV2qY{훕_JFhh8ݶ t@eG@MFvkn~6Pf͒:>o-??:Nqh/ Q oRw0#g\-*nCsjN] p/ |tB7cwa4Q'H3 ~8} )\>՝]Q[ߗ ) 3ǃ- ep^ͣz` q~g(#--_TteEqg8qE & 4|K^!F=nqֆ:rM7oBlH{uu 6/, >myU-( ,5:̓j$0i߬=E:I&ZzUGX:^l%!șlUb`w|D]/|Mp!|Ͼ6(Z|ekg13~Y!O Kɥzzw@/v3sgDnSwyBaTo}Uq{)bɍ6cK3Jh/+DC(RM GjZyuѻPVإ7C/uuIiJZxlvַ#OYQ)]Ss.:YeX>[.C@%!/sY<|OY~ /[?!8Eڱh)(bOYIUUb|nYqRC]$ U/P&2r\Yʅw}W%>pܭ(9`ʼn2Co8~,Fbb!sN6b5bx%x[1~`FeY2 MuPHvǺ7T}XvȬcm,3Y` ӏ(omk lYu)R+{Z\74=jW"ywP|.:, bl~BmPBi%Į$27ldtLKNdi?F4aGBQ܃>2 %YfTKm:Ӳj]6r1"37wQd];D.$I\e|cp`RaWc&Ȟ'Ԯ|Z5&H3Z_X| wRXDj"daWDKAVUêZ^  -=#旦]zzy 1[Qh朦zW' z PDD#U¥bJq !i8qXAT.Xhh{`.+C@rUWOMR̸I͎DCISJ o!.fhHfهQ28PA+,=E}qSs^`y .W)J=H=g9}JhX5e{چMDbIlhG8O[mKԖR΍!i:,dm{.^@楺\YA Wbn0;mYyqAPa:>-9c_$n9.AQvEhSU0%'*#[L ɛ(.?8^q"% pZaV JpWO˧4Rͨ91> HSU*;;S&xt=)@]ig3@F`6TAص5ܷ׭>G -G 2 $H,!R9,ڋ+A^|^ ҆f_Va۔Buv[AIcdBS=0#DCЗSSۃ7~wdO!}CE# .,UFLn7cVlb#;vBmb<ZgzHxjغ/˃> vj_%qsMžqbE]4'00+Ѭ߾*a߶ *lwE ݭy(Y]̐ *\gn(D{?uiZ;)a~m =z7%L~`^ h5$)YK hCM/^i:2ye B ,KmԈ oS>zdJdl wE Jt6ԅd1*6&ҨZW .a+#f.QgkFܫvaJl 6eYXkVxM#+aOX\nN^/NsQ9͵2{Y3}}!ZOgg~ysV~&7Qm) j W̓ʈk/vyNG,DAGX Zs.abtMyL1-Kct_ ŭ\cYH.Ģ"AV(-wWLHUsp,֧IK?vy=@"К T,F{+,YS4,UxIA'RḖȚ:ҁ6"*@;(t҆2Z ." 
mx״MN.cee6ƋϹUƃ.ٔd8W Z|mBqax~VXre nr1>qIQ&܋M-KvE[F#>1/p[OΆ+)Plf!S$YG)Xoρ7ٳy]Zp!&~`1 ǡ$-GPmǜ# =֪eB`A }7RVdlNs0͠( ڻS8UỴ U;h *}7hǤV o w|JZx3ɃVi!*-e[fp(Y0 i JvO- h6,ߺm hUz-= ).&zR*/B^tC}xddvllt ["D6o6GgobHNv7h'J' qT8 8xǗ]e):gD7/v+h)*^Hn 'W4{e0g:*02 {Zv6l?G'n 6]u :Wy[IB5*fOcJ֞jRgRu镧e>:>F?)2b۔81;(`_~@$iS;&U1tLPU[kߛWmQHiF%YQZ0D6\^=B-o'ٳJoS*SE-)6˅pviuFzmj<0* 1, ļKc_B<Ự Nx %ET8~|'}[^Q.u<_vfqgI75)۾aXGPb r8% 3i3esVQJɊ5q5%4h!a4!R@H,|[:⍠JcuHKOeGNdȘXW; 9e%䚈Rh),0~y.71݇IK'E~KXk7Qb}Dg{vƶl& |ѱ,DlO]0wO/XE?0 ]X INVA۪y3~2T; HtaiRk"֐|}&߅khj_@Ho5fnpoJ]"YYom9]^z˶LG&M#m_ ~TtW7Dm36Y!z2^ N r G .~;M(Azd}E OS ѻ~Ywy},`.G4.%95*6acM9MNo[N!IUO b(گa!5DA3N7*+GY4K<2Oߡ|3}[*OMd'7r3m昲jQ?ۊ}k%cI %hI]SI˚./ΌMU෱iM֕j hϬo\0_?9-ą*.PDzV?Es_yilX!$3UIN>Ctu"3(&8*`ܣ%HcAЖ d[$(fZkwW+9Pv~a.LV:k^(gx90Z8 n[ީ#WOLr kꉇjj>Qqyw +`ivzud?9qjXO+>:Gb#vvIq5 ;sLCbZ鷰9}[0͓y~U z'x @񪵒Erzȸ} 8j)[-e=瑃nWg1+P!h #L=~#1yl}7&,RQNg1+GZO`T+ zr=`ypG)Rla{:'8Ht?J)ti_]ZUs]JmꚻYI mՖJv둡׀ {p~F d6E5,NoWM)WUmqo ~{"j@짏Gp^>֗CT*L vj?Gͮ\kDwɓfO\~4}|. 3bwEo4.Saߊi/'ƿ"I*0qxlxǻݵĝh7/]JmǨYBȣ֚A1?ZF~6߉>:l r)ε_@"iоEZa?]}LsJ뺸Ji*p tk\){" 3@kMIa Azu$`~Un9_ѢXQ[SwPKh؏ M=clT_\vɤvhqX9LPM7UٽX obyd;%3%}ډ7o!!=N"eq9!Z:-/}8a>uk)n߈.+\P=Bdf*i1 8T?&rMY:OfEQվHGZjc<&VG{%L,M0q优 NO qBⷞ8N`~ѓOhYZ^2 3IqukN+G#q(V6ui+Xlހ"~P֋/~hf@^8fVqUC9 p(EydZMպzzHO`j.7lJ8-89-m&CtP>sWkj!gVݮ!r=~3gG$ 5VЀ%~6^~׉VfWxw @c e.YC~Pn]ruXοl_Txc =(sB{m0:4ZriU Ի||- ;S5~M* Ǡ6d b##"ͣϗ~]RlErl6< qlNj&Y.{YvO6?Sp/iXVaU=<5] uc2I^*sfmTO%?{(uxp|k_(KIނ9EߜkW&K]N˦d/ &IMBy+QFL&F ݗ_o`o7C>ƯC*iOcXk1fsQF"ۄ|4K<[VDq Ŋ$n9'D@V/"gDǯnb^ِ=z*`ղe/D Iӑ~%BsXqT\4c]mZܖXWe Ӵ1{!CŚōTA֫\FU|2SlQ)/C([ۇy:f:]F$TΖS8j,V^}LY|@9kEm㽢\qΜ5GɃ2c^Z:M(&?{%2$'|HIl9M'qb@W. ?|<ÛMwcǨ081 tUKOI-yp&q\֦jp6m|K#eKDM?Aܴ:_L{Wfr3C,jnQ:H!?;U+ݵQoRiԾ(bH$w#-=ZX M/y# |D.%3t&i~P{$1.U,\"ϹQ~aʓia!K{|I4dXh\=_P9&ks{  ; zʑ`CeN1Fٛf뜍wuZM 3AɔcZ[:L U +X+猇 KR */>^Ed?e'FjJj|OJ⧎b6`7gOHCm2=PVWdLU'"xHŨ7IxpSn)5jZ`m|| R’y&[NӁNԫQ0 -iPO>;%ӓwbQv 6A:t%d-+fUC2k<kУ `D冼@~LE2x蝏0qz8pԥͦ JX8w GuBude]Evd!X"]rmp ! 
;Rb>iss#%__e:K3~suY(5P~9f>ԈmH^7xQdeUJ0zh!Ai ݝp:ZqHc=>7Y} n9,‡~_W!x~QJ=[vBڗ"&H*uf}p ]`X.{V̳hL:ruleM?Ǵ}0B- H[ 酛hi /-ڪ#q{O5P "Jmܼ'$`mq: 9O6d { ϾW.snNw`P¯Fg Uwس؝mh.P|9 HdIȍuUc'zLm*@@4{zi&Mϊ Hx)h*,'FPOױABvRh&phkf w"?[MY6áWٗ \Xmm1[\&8|cqfdVG\ƛH:]v "<p6bz}(Tb’i<&5'@I}L16(/#^WT)Q¡!iMfY‚VI{M?!qp`y)ePkvJ+և, -<=RLM`d(P^JNDp+d yEdY8| щ:"IaUH7/|>pEImakz:whi-cij}`h~\\E;}X$ҍ-d"6wG{>TBFc„ëuNC 1g ws:ԝ%XVdۀ\FCWיҁVi*|s@4sҕK; h#XaRD,N YnEģ!@tֳ< d2j储b]cd;yM m ig"+l&FһmPrFl)_@1C֮#\wGצ UYmK)ƤS 6cbbQF#1? P*}ȸQc~2cIFXpc͹ B!EF~?^$zmSa[lxH3h}M̈́)+g^B|039g hdndPn /!C$pɟD~&K{eXj e'52 Q}<)W|{f9 .-dd ]6nJg,!𞥕,XtS͘Ac)=JNT&MLO:H?*avi奷qna6n|-[y#vF.a--lyӛ*- ސ9Zi2βV?t 7$O]ŶWU+,$r*glFd86OEٍ6 [VTWg,PPY~ﱾ"G`A"FJ^_Ei]Aޫ=?XGEϜBTN3zylZy5)L285#gѥ ?Iu%fH꠪ltY\Gɼg{yV=WGNF0}Y*]48sW(𥈪FXq{x֊ GŌLXhflQ߹'xח.jqx+l+#h~=7ά`w%s:ˇwlϤDtW ZtF;A ⷓSߜ³]Rs|.x&f4)o{9z8ɬu 3j̃sQj҆]G {f2(tB9Q׾z~b.1i^<_:'|$Uj\?ef>g4w׶ (a$µ`N>TӾ% kn-&ٸO7/9'YW#x֟ލ\tWkp%4 @/4 B”fӿPU;WC-kPߘ7,,t70gpd o}$>,B9 y-.Ds{_=$7m;0KG}DZ0zDPc=h 0kL#@HsћQ>Tjyc6ERe9ÊVNFVc]*qW!͛t<$~amrrPJYe83Î*JEưo /,[ uD#JzɎh zi3lmcJ1;t lZƮ/=~QIufU ƵU|A+̒Wh)-DzLN{^|75>q˦7Q_`̬6ܤI .EѸ6H/Ȥ:Pl޽4f s>X<]{\LAO7εn@|ףM*K0X]ZJu)Uݴ`c?~7 :Kd]PUVw8T1:!KBpp ZQWi%XҼwNw6j=SZQ %4w,jL図S!ɒ[ BLT #JוO LXQNtOehr+R>RڥBq&cP6.e{ֲT`SnFiVn=Ʊv@ɒYL`o|W>Y<-Q 뇃к_a[贝")!V!I>G4yd7#'mtheC,LZ ;HKX8e5K"wRӴ!DsXzfXNPuQta装ÌfsJn:q{#Y|JgnĶeGA ׹Q@8EF߲NO u^8TH `É,y1DLgʎo,۞큩B=N2WUnle РT`%Ȇ K`J@7{nrX>Y6v]^_18uRr_(  :o(es>.H ]~+Ʋ cZ*ٌ͟0T X.IjiȠ<:ucg%k@ m+KQ,_ƖL: :NjPZEfdn\]æ]={tr zti!r_q,q P Z?:زgrg.Ѡ 'OdvT_kFvhyF$A#gb <p{嶛"& dZ>*ts/sD}NTt*d0,`.J嬡TdA"uQbtЀئg=!Wn,0^ a^(.5_K1CWb$?߂' #(;T48 @GpBЋDo Ϧߟѱ xɱJ-?$ܰ]n%L<Kj?@ arfC2鞿 u7\/kB[nQ]lgOr Zk9տ =i1kٓ JiVjS}}4x zPB_3 ЍNC:lYM4p`9-+a%$i@+< .2p4M} (q=[]YI S΍K,T& 9zJJ #z ?} SaE(~+nO4ʽ߱[̣g34ˇ]0j鈿q(X,:)h 3Y}^D.Y![kSE C0/3#F]:w * /RU-.T<Xq'={DdFXWX'IW~oG :b+4o#/= [dpw>NMkU"rC_ vlYQS.Z~!hQTr\]Ps\??wsOomhiZשY Uf?ݓsgJ-h֧xn:~Ɗfzڞ-Nyw`s! 
\@AvMA{]kgY7F3̥}|Ar]f ä%=sN βT8*]7`<N-Nx"NL" 34̃?#cO^X8#g^iX:"!DqB(o PGjTʳ7T%W RS\Ӏ#{JiܲHg77Oh"u$=JT~FY&xnrL?HD֝ Md-Cq]@Qb,QwL C m&; կ_rLZhPZeL=Ϩhߊxšڽ%:ge=2<G[yƒkA=Yc ,; 9/k}qqZpqZ^v,cdOɨ5\_Ųk! 8UOÌ0F p<|QR |\H9ux!yY6bkp@ swdtJ HSJ fiZOup*fѹ70EB285pm }| p y۽) I{/^VxbmRޡG.a?EIQ61TH^=Ns Ғ*_II".?+5!yX+ϓ>~ubp4,D-Ů"ԭ4}Hgf;5DNsJ)3@UV%?4Hniym 3^^YpŷM0g/|ůM`¹76kL_zqJkZn/09DKZ GvbM՟8U|DCƲn=EKg\rmĭ6gӢ9Ӱ^0_luAdXC Q_ x-DP 0UTSg*l1-RhW}EG :Bu &XlO_.P]6>F&b6`/ýZvȽDϏ$;v*;|"~B78@Kw|zA|zZ)[-ŏQtFYIXTf{4R)({d7 ?|ch]E3 9z#|6a_nͿou37u@ YV7j^EA('< MuH O}gw=A'A XX#{hQ1HS_(g+2ϲB=`rO$ F4`FL<\vґX?RbɧŬ!2!_!:Q(tE`6N)(#; YA(g5X8z%hiVY‰so/`Pj|$ƴUVC:"So/F&F̍7WoFc -zc^9|½DqP;J݈dSjX.B5; Heo͒+ $fx9߃K̅!i~0[DCmDkҨyu}W]g+lЧ.yƮ@&qZ(i>j@p]tBdt)4NEj3IQǚ319Y . $r`xeIV F4T:k$sf{Ov>7tbx;Ua^ p7nOI}Hhc"d6,IKFC_>W5ԃi"/n`Qh~^1~uo٠dŠс]}'Ux! sQ.6*4hp|64Sp+smnʭ z M/|}\ևo&l`:[@-J(ŕ$pnby輜7q$ôV@ؾ! JJx4YS>ݹ3B28%1pU'6 䇯ku4i56@u,塴(lOMB3ZӰIg`'"Xzƻ(u_}{E%CߟZG<j вkUC_:tC1˜$B|XuHz&&*=ML\v}8>)U [tGi2R"ſ[,6Z^AͿJ8x3Ux7y@ !ݽ@ *=p)Z ʏySKejcvZi!HCHǷ1!bI,{6> o*L:73V6 __dy#K8#1%NrXndlX?K{KĞ>&W]r{ґ,Rk%A+HN4Z'MޮRWm}&w)8a!H7 endstream endobj 501 0 obj << /Type /FontDescriptor /FontName /XSYWUX+NimbusRomNo9L-Regu /Flags 4 /FontBBox [-168 -281 1000 924] /Ascent 678 /CapHeight 651 /Descent -216 /ItalicAngle 0 /StemV 85 /XHeight 450 /CharSet (/A/B/C/D/E/F/G/H/I/J/K/L/M/N/O/P/R/S/T/U/V/W/Y/Z/a/asciitilde/b/bracketleft/bracketright/bullet/c/colon/comma/d/dollar/e/eight/equal/exclam/f/fi/five/fl/four/g/h/hyphen/i/j/k/l/m/n/nine/o/one/p/parenleft/parenright/percent/period/plus/q/question/quotedbl/quotedblleft/quotedblright/quoteright/r/s/semicolon/seven/six/slash/t/three/two/u/underscore/v/w/x/y/z/zero) /FontFile 500 0 R >> endobj 502 0 obj << /Length1 1647 /Length2 12940 /Length3 0 /Length 13792 /Filter /FlateDecode >> stream xڭvUT\.ܥ ;'w N{pwwww'Cӧǹ_CXo7*Hbacbk< Y h xq!QQ9N`q 2O>!Ql,̝*t e0r ~pYـNcGUdZXbJ2_R_R 0%g#k c1:qۂM,*͑K8ځ-@n T;3`;`6v6+w 9ؾ[ؼl,Q%9؎j黥_%{y:-'_@ G;k{w0;pv+F `b rt|y;_Ym`6eFbci׼ȀMml8SrA {@[;d=$2 Bߡ%m{h`Z6@_ rXXwn G3D` ~oru  
z';N \P&^;oW"YQYٶ+O4lM϶nO&6O&v~?sz?  x_'ۚ5GN@/;oy@n cy[c ˔TJQqN6`_)aJ _*k^ vZt$β?u`Q0hFyopjm*p8 }tæC1NiE=>?븄%bHI<&Opr7t1~}rqFvEJLb+V1GRx89ٱfɼ\L# 8%_cqn7VuY6-ۚ$T/IZS+OMy2 I:ggC_BɎhj|8 \C/1 *LYOGwU~WZ.E]yTH`cecM{ʔAz \778[o/=\ uC^9QfF3&W"@ﻧD|IP 3ޱXQi ^BZ| B27peTfdY(KH¼A|_\| $m1y?B|9D>Fh2_N4uW!2Q#~* 8K^l~kcQa뾖I"OG6h;ѝg;l颁P+Չ}c,I~yjeDw\|NuE[+2 T^9>O $wi&8aϦRÍP{G'zAYH+H-.-uJr9lr"= y\&(cڞѣ#HqlDaa,wڷ~Ur5;A|ȆwhTŝDhSrN(|FS^1E}KsKC[}qڷvatǝ^pojǎI+7*4Y J6xi#FjK#m@I#)B.wB؆Vo]?wЪі0XSxRi9^䟷Ȳ}[JM!V MK-ͪ &n{Q˞ 蕊gۇ|kҲ[iyu;[-Hͣ *CXj'3Gv!hQ7'>+zXK[;7{ҟ;'{Ao7 X,Y{tN͜3%Fg~g'h l$+.=6o*7b2$-j`t\{Bԕo4cy cv[_g+q_{kf>i|H׋|; 'Y ҆#u7r>5ra~˳|je%^eS۪wEwF#prџhߩ.]bBFw8z0\!kkTv[}y{x^2ş46}7}6౓IxDEۤ 잭= 46mAڷE&>=4s[݌|~WޟT[Y>,ot [ &AdA^ ]e8ec qIHש 7-bF >|", [(m/5;YP`'eowcDe3jJ|kWcOv18Tvʔ?1WLu.#ީBbېW58ti+K>ݛm嫽T`&%iű ; y>khC2%ķR-ɽ+h%k^cpm7v05hzC<'8;eOLvcQ4QJ EgNxJɖ@ֽ iJ&YBxGX:[*r}JÝ݊]ت{߭p s 1KwZ ]b,Íb+| Ubkq ~]Jgsd4&aFjNH=a~g&nZFᾃYA+gD݈ḗIWpkwVH}:6ZFSWH=FwLR&1^䠱L&x$.QX?(w`-Cgv as+KhWpN\:ǂ֬BAEhGCNN{Fg8}V)4⤡1<w57oh\_,Gk[q])[!߂?L{qK\ZWi4 +tvM zRH}9g#g^s_`{ -@P@UPBo ѰI~6!$ :~u[#׍{4c@=j󉚶-MJiHZ- ѯSقinʲo AD h@"<&:b`J?oW5~)W צe<ugJQY, [4gҏx@E#kNk6bFK*.ۣLg q*Dglnv^0H8n~C<sa(ϯ(/)* 5re/x+H 0x gB/ /QnȨXEF6jt4dazTprǾmCZݓQhp'IX8G0j=G`.˺ ܳևAC`U/+DP M΁<˖?`ExXtI[r);>Z}SaI8T~9rOb}<ֽ+hDfDDs/[Ԯ'Xaշl8i81ͱ$+SYoփ}Qʼ_rb?d#Ҋ\vU(\cیL˰P(uփ̒zt0a˽Ezu.l$4M0Y+8y0$ʀk<{Yf2F]m:\r'6ojH bb+x4(bQ(2MĶ]25w^M Bsi%OΓP}F/ CX54㎻;Քy)C N.LmnuU(pe8$e5)lނ{kv-V{i~\-P59pK-R7yƫ_DCPHq Br?wu/`(1Y {Pl^]U춲ۧ9tҮ0J]mJz l3+ fV{]*+7o'dU.eE-Eޝg3_t}1шςӚM?ɎH]Xu~&m(B?Bj$+}|#R>H:,)=7-^N;'(hy8* Ab͠>zƺ:1it{xM\y݆ mgsl\7RGw{dWò",>ƞHX^4/Nq.z9$#ӗZQ `" j9K0l~@U2u%"'EέvjS /TB ԣ*L-,˦\n2ج&z >~Qou"BDY~BubbIɡ*{(%08T1s0B]+~b*X}ɼnʑZx0C$nVIۗ霑]Jl$ /2iUZKo8vw<; &U4+1Gcg9']<2P$,N(`, w\a>-lrq5%tG{ݓjR5&db 'FYn.e sߢRn5vE?;1a %e U`EEM>j9n6,&,-i70@ĸ wP~^@#t'S^~?bMemp*Y7 ]Xc>\qw>@`?dMrif)+((nqUQ:ŀ魱9S0zf_ϗq#Nw'^:16eX]|~ZsD7{ӥ>1N ?֤"7[d'ÌC,"*W=KtNijޫTh~-Nn7SSOJ-X$F͜6^?\#(@4h\Ogkqy}A<䐲p(UԋnDHRoUA\JψQR g.j~RID$xot7ǭ"xų/VD- g >.f3ߩ8(>^f@n#\ j@s&@= DH픱W\"koI^K;M _e0EUq5XV{,ceE mx:Cר)ACi9Bi`y 
+dȮ?oM36[$~?(6oX )HYztu'f$5Sܮ;!~ cC J\#RI+)"^Mϔlr֞_WX/ -b@[pZdid,B.uͩl &uow!Eet Jpk|tO9tk LroӺZEhst|hb=?ۅ>R!J7L?Z!k!Vl^ G)i Mě8HoGQQ7w<( lRY ċǾ&gizQLA$hZ0mh`n=uV%BNM3 #oj@DF[ֵ,1JP9OZ[X+Ӗ %@R14zɉ4|@ /a~\s3S +"Vom! &S=UsNO_-#чxSHNwN%<F )s=DҶ1RSr_"@-q${!m TM'V#m;<ԅ_!OD?gpH!.p_jg.7o}m/rYm_P-|Xv*D?Z=h&5A{t'.E?S?B8FZN^ưot'3MtL}R)zA/%5DN ue)JAݖWaSA5FV*~E:'p->ĄĶ2K?!bvn&:T0#f*G*xط\$^+ñAPLF o,j߽pzZyfԨ4n'>pyr]ׄKk@aDrl%YEI!կ嘶T8g0s -GSd&U@-N[֗D:=A=U:[92N*W_3z~Um Te Cq]8OX@gWT8oɋh~M7'LQ2PS̕7P|6HGwFHd' Om%Xcҷ>NspSnFFAm[&cB#USnA2yu_ ;g,2܄4Y>x@ɤ_?th1 뇡;US}5XgX ((YnկGtu*+ G2~ecXv]z$ E=Gqj8wߞɧ+ո%U`4 a4JFxMbT"|mŋEDא(]5YWE"N ,K!Y}'}1 @DɢUGK-XK<'AQ(\bzo-˨'雇3[R/2qlDF&~˝B ?M}A8xe@ ~P+~h$ϡ%"h+z {F"nE2rX!/Jbo[0,kځTQQ_"b rX0bmϿO׌8(|6e^4#IUb Tucz"J*ڊAiaîce e jo?hdxhLAK00Q zJ.?z?7$!h?8v^]!+ t\.A)'̗2Rv"m/|[|ek/1]|úYft@ZچVj= H: oEOhMvth1`Qm6*ǔZS.LOeo9;?G7伜펰3r85eΖVO!t8KlY*0 i~~~B|*q kus0ܓĈ`fPy!cB?@!Nj]dR GҌy|WAZ';2[3j;|)ӶWXK|Џ mZDK6I4\G(?TT vWvҰcxFFD31OE#gA\r~Ϸru ԌVx\٢%uO^ 2?p7[eRT[,!VI81d仝UvzFNfDk\r2+0Z:~1B-pRAПjL7i $9iq>R!(7mzb:|I$!$.ү@XQٞ~A񋦤vy5j0}//= GkAe.euD1anH+jbYU-qq uKMP gᣱ^zqA&q0PLylOɪ6ޓu_Ǧbz &g̟^o 5GAvZ^JL$Ogwj;2ڱO.*fDJkܸ:.S`~+Ϸnj1;$R_uQch8| HLPz}8XEr"5C׌_>%=fX#fjڥG*} AHcu{-6cG>M@F;ذ N&  n qzt0_#TۖRR 24x4jq9[\UDr.e rL=4U^b7MPҴhCAwt6GRxS'5+1=arSY*CZ%Dy)Zdz W_Ts?n2TyJrs`^ar-2-jĝDzzn!Im34U$a ,OwO[;+Zi 󰊚M?Z+/P '^KĢ<z,[3&N8:mHޓiz$= is_Xte\gZ|:r3% (X(@88 o4y߇Dtj30;E3bB=)XqȎ>:T.q4R)fF_ > SG7Wц& 9/gi.nhPK`&9KQL(o[x=,0=-%*Fp$#&]m@[OǗT8גΤn:ct Px.5hFqUw*^d^ 0}LV#6߸a}J2fa Y"`! 
]w|I>1^~X>/l0.!{yZA)9obX&H@^䳰m=:/ñ!((Cӥ$hd2$F'EgR\c0U 7JEg|超|[[ؑוiw߸自 Wm]В:+10їaFh gN.1ը׹z=8<bhJU)e2y^`zɦ.y3>cPtԏPoQގfdƃL{5FZ셤'a*x#>9L]?F^Vrh*z񳘝}MU`h]7P串4&*yπswPj9ȷaF69"^, ʷaD(`֙3/ʬlɘsO".c k:Sp(QC("&n°u q{*P=}Vg۬t0t{_]!/Ol ~5$NNKH%]ҷN{kK;{xQu:ΉLU,3*=Xjw16H_/u Ҥĵ35~t+Վ`=GԴ@`̱C^l.v-u| Ĝ64oX\ 2rșTsDVkUqptݛV@z#mosss_E/F$<{ Z!>ܰAĒq o%A>J(>ݒO9p62^ Rė)ã͡59< Pu/>uyհt$a.?*QB)%09,O$87D=Urj &@$&^{ IoW>C] 1 7AՇ1쟏aN4f6K?*?D\-(rQ,oOOyRY`5ȵ6=J, =%AN .1oR8V9]i'𵕨n9v(rfBCRz|_؂e;YڒB\wzwoYM5c7),8G-5h)-%k~^Q8nDXף߁GVBFzQ,:fhmpO(w7!K7 pP|Os/|?b @bSw k}1JZhD c0 H,n:?[nVj~t-~*7YkfKo.łK}}pU"*|R寘3a;觐ጳb(K;nd ]!$`?A9p!S'W|Fra)ps:V&3Mp :0z+tC\B.8} [}2kNUr}`c;9v]ָM))M$n 5$RiD%?iRrx-xO"TcgCٖuR9~y<.KMPRO8KxnctO9BcI6@`I(x!?\mJ*1 endstream endobj 503 0 obj << /Type /FontDescriptor /FontName /FYBOQQ+NimbusRomNo9L-ReguItal /Flags 4 /FontBBox [-169 -270 1010 924] /Ascent 669 /CapHeight 669 /Descent -193 /ItalicAngle -15 /StemV 78 /XHeight 441 /CharSet (/A/D/E/G/I/K/N/O/S/T/a/b/c/comma/d/e/eight/equal/f/five/four/g/h/hyphen/i/k/l/m/n/nine/o/one/p/parenleft/parenright/period/quotedbl/quoteright/r/s/seven/six/t/three/two/u/underscore/v/w/x/y/z/zero) /FontFile 502 0 R >> endobj 485 0 obj << /Type /Encoding /Differences [2/fi/fl 33/exclam/quotedbl/numbersign/dollar/percent/ampersand/quoteright/parenleft/parenright 43/plus/comma/hyphen/period/slash/zero/one/two/three/four/five/six/seven/eight/nine/colon/semicolon 61/equal 63/question 65/A/B/C/D/E/F/G/H/I/J/K/L/M/N/O/P 82/R/S/T/U/V/W 89/Y/Z/bracketleft/backslash/bracketright 95/underscore/quoteleft/a/b/c/d/e/f/g/h/i/j/k/l/m/n/o/p/q/r/s/t/u/v/w/x/y/z/braceleft 125/braceright/asciitilde 147/quotedblleft/quotedblright/bullet 169/copyright] >> endobj 181 0 obj << /Type /Font /Subtype /Type1 /BaseFont /FSPRBE+NimbusMonL-Regu /FontDescriptor 493 0 R /FirstChar 35 /LastChar 125 /Widths 488 0 R /Encoding 485 0 R >> endobj 172 0 obj << /Type /Font /Subtype /Type1 /BaseFont /GPWYBO+NimbusSanL-Bold /FontDescriptor 495 0 R /FirstChar 34 /LastChar 169 /Widths 491 0 R 
/Encoding 485 0 R >> endobj 217 0 obj << /Type /Font /Subtype /Type1 /BaseFont /HAILJB+NimbusSanL-Regu /FontDescriptor 497 0 R /FirstChar 44 /LastChar 126 /Widths 486 0 R /Encoding 485 0 R >> endobj 179 0 obj << /Type /Font /Subtype /Type1 /BaseFont /PVAPSW+NimbusRomNo9L-Medi /FontDescriptor 499 0 R /FirstChar 45 /LastChar 117 /Widths 489 0 R /Encoding 485 0 R >> endobj 173 0 obj << /Type /Font /Subtype /Type1 /BaseFont /XSYWUX+NimbusRomNo9L-Regu /FontDescriptor 501 0 R /FirstChar 2 /LastChar 149 /Widths 490 0 R /Encoding 485 0 R >> endobj 188 0 obj << /Type /Font /Subtype /Type1 /BaseFont /FYBOQQ+NimbusRomNo9L-ReguItal /FontDescriptor 503 0 R /FirstChar 34 /LastChar 122 /Widths 487 0 R /Encoding 485 0 R >> endobj 189 0 obj << /Type /Pages /Count 6 /Parent 504 0 R /Kids [166 0 R 192 0 R 220 0 R 241 0 R 274 0 R 292 0 R] >> endobj 350 0 obj << /Type /Pages /Count 6 /Parent 504 0 R /Kids [324 0 R 352 0 R 375 0 R 394 0 R 413 0 R 432 0 R] >> endobj 480 0 obj << /Type /Pages /Count 2 /Parent 504 0 R /Kids [447 0 R 482 0 R] >> endobj 504 0 obj << /Type /Pages /Count 14 /Kids [189 0 R 350 0 R 480 0 R] >> endobj 505 0 obj << /Type /Outlines /First 3 0 R /Last 155 0 R /Count 5 >> endobj 163 0 obj << /Title 164 0 R /A 161 0 R /Parent 155 0 R /Prev 159 0 R >> endobj 159 0 obj << /Title 160 0 R /A 157 0 R /Parent 155 0 R /Next 163 0 R >> endobj 155 0 obj << /Title 156 0 R /A 153 0 R /Parent 505 0 R /Prev 99 0 R /First 159 0 R /Last 163 0 R /Count -2 >> endobj 151 0 obj << /Title 152 0 R /A 149 0 R /Parent 99 0 R /Prev 147 0 R >> endobj 147 0 obj << /Title 148 0 R /A 145 0 R /Parent 99 0 R /Prev 143 0 R /Next 151 0 R >> endobj 143 0 obj << /Title 144 0 R /A 141 0 R /Parent 99 0 R /Prev 139 0 R /Next 147 0 R >> endobj 139 0 obj << /Title 140 0 R /A 137 0 R /Parent 99 0 R /Prev 135 0 R /Next 143 0 R >> endobj 135 0 obj << /Title 136 0 R /A 133 0 R /Parent 99 0 R /Prev 111 0 R /Next 139 0 R >> endobj 131 0 obj << /Title 132 0 R /A 129 0 R /Parent 111 0 R /Prev 127 0 R >> endobj 127 
0 obj << /Title 128 0 R /A 125 0 R /Parent 111 0 R /Prev 123 0 R /Next 131 0 R >> endobj 123 0 obj << /Title 124 0 R /A 121 0 R /Parent 111 0 R /Prev 119 0 R /Next 127 0 R >> endobj 119 0 obj << /Title 120 0 R /A 117 0 R /Parent 111 0 R /Prev 115 0 R /Next 123 0 R >> endobj 115 0 obj << /Title 116 0 R /A 113 0 R /Parent 111 0 R /Next 119 0 R >> endobj 111 0 obj << /Title 112 0 R /A 109 0 R /Parent 99 0 R /Prev 107 0 R /Next 135 0 R /First 115 0 R /Last 131 0 R /Count -5 >> endobj 107 0 obj << /Title 108 0 R /A 105 0 R /Parent 99 0 R /Prev 103 0 R /Next 111 0 R >> endobj 103 0 obj << /Title 104 0 R /A 101 0 R /Parent 99 0 R /Next 107 0 R >> endobj 99 0 obj << /Title 100 0 R /A 97 0 R /Parent 505 0 R /Prev 39 0 R /Next 155 0 R /First 103 0 R /Last 151 0 R /Count -8 >> endobj 95 0 obj << /Title 96 0 R /A 93 0 R /Parent 87 0 R /Prev 91 0 R >> endobj 91 0 obj << /Title 92 0 R /A 89 0 R /Parent 87 0 R /Next 95 0 R >> endobj 87 0 obj << /Title 88 0 R /A 85 0 R /Parent 39 0 R /Prev 79 0 R /First 91 0 R /Last 95 0 R /Count -2 >> endobj 83 0 obj << /Title 84 0 R /A 81 0 R /Parent 79 0 R >> endobj 79 0 obj << /Title 80 0 R /A 77 0 R /Parent 39 0 R /Prev 67 0 R /Next 87 0 R /First 83 0 R /Last 83 0 R /Count -1 >> endobj 75 0 obj << /Title 76 0 R /A 73 0 R /Parent 67 0 R /Prev 71 0 R >> endobj 71 0 obj << /Title 72 0 R /A 69 0 R /Parent 67 0 R /Next 75 0 R >> endobj 67 0 obj << /Title 68 0 R /A 65 0 R /Parent 39 0 R /Prev 63 0 R /Next 79 0 R /First 71 0 R /Last 75 0 R /Count -2 >> endobj 63 0 obj << /Title 64 0 R /A 61 0 R /Parent 39 0 R /Prev 59 0 R /Next 67 0 R >> endobj 59 0 obj << /Title 60 0 R /A 57 0 R /Parent 39 0 R /Prev 55 0 R /Next 63 0 R >> endobj 55 0 obj << /Title 56 0 R /A 53 0 R /Parent 39 0 R /Prev 51 0 R /Next 59 0 R >> endobj 51 0 obj << /Title 52 0 R /A 49 0 R /Parent 39 0 R /Prev 47 0 R /Next 55 0 R >> endobj 47 0 obj << /Title 48 0 R /A 45 0 R /Parent 39 0 R /Prev 43 0 R /Next 51 0 R >> endobj 43 0 obj << /Title 44 0 R /A 41 0 R /Parent 39 0 R /Next 47 0 R 
>> endobj 39 0 obj << /Title 40 0 R /A 37 0 R /Parent 505 0 R /Prev 27 0 R /Next 99 0 R /First 43 0 R /Last 87 0 R /Count -9 >> endobj 35 0 obj << /Title 36 0 R /A 33 0 R /Parent 27 0 R /Prev 31 0 R >> endobj 31 0 obj << /Title 32 0 R /A 29 0 R /Parent 27 0 R /Next 35 0 R >> endobj 27 0 obj << /Title 28 0 R /A 25 0 R /Parent 505 0 R /Prev 3 0 R /Next 39 0 R /First 31 0 R /Last 35 0 R /Count -2 >> endobj 23 0 obj << /Title 24 0 R /A 21 0 R /Parent 3 0 R /Prev 19 0 R >> endobj 19 0 obj << /Title 20 0 R /A 17 0 R /Parent 3 0 R /Prev 15 0 R /Next 23 0 R >> endobj 15 0 obj << /Title 16 0 R /A 13 0 R /Parent 3 0 R /Prev 11 0 R /Next 19 0 R >> endobj 11 0 obj << /Title 12 0 R /A 9 0 R /Parent 3 0 R /Prev 7 0 R /Next 15 0 R >> endobj 7 0 obj << /Title 8 0 R /A 5 0 R /Parent 3 0 R /Next 11 0 R >> endobj 3 0 obj << /Title 4 0 R /A 1 0 R /Parent 505 0 R /Next 27 0 R /First 7 0 R /Last 23 0 R /Count -5 >> endobj 506 0 obj << /Names [(0.1.1) 2 0 R (0.1.1.2) 6 0 R (0.1.2.2) 10 0 R (0.1.3.2) 14 0 R (0.1.4.2) 18 0 R (0.1.5.2) 22 0 R] /Limits [(0.1.1) (0.1.5.2)] >> endobj 507 0 obj << /Names [(0.2.1) 26 0 R (0.2.6.2) 30 0 R (0.2.7.2) 34 0 R (0.3.1) 38 0 R (0.3.10.2) 50 0 R (0.3.11.2) 54 0 R] /Limits [(0.2.1) (0.3.11.2)] >> endobj 508 0 obj << /Names [(0.3.12.2) 58 0 R (0.3.13.2) 62 0 R (0.3.14.1.3) 70 0 R (0.3.14.2) 66 0 R (0.3.14.2.3) 74 0 R (0.3.15.2) 78 0 R] /Limits [(0.3.12.2) (0.3.15.2)] >> endobj 509 0 obj << /Names [(0.3.15.3.3) 82 0 R (0.3.16.2) 86 0 R (0.3.16.4.3) 90 0 R (0.3.16.5.3) 94 0 R (0.3.8.2) 42 0 R (0.3.9.2) 46 0 R] /Limits [(0.3.15.3.3) (0.3.9.2)] >> endobj 510 0 obj << /Names [(0.4.1) 98 0 R (0.4.17.2) 102 0 R (0.4.18.2) 106 0 R (0.4.19.10.3) 130 0 R (0.4.19.2) 110 0 R (0.4.19.6.3) 114 0 R] /Limits [(0.4.1) (0.4.19.6.3)] >> endobj 511 0 obj << /Names [(0.4.19.7.3) 118 0 R (0.4.19.8.3) 122 0 R (0.4.19.9.3) 126 0 R (0.4.20.2) 134 0 R (0.4.21.2) 138 0 R (0.4.22.2) 142 0 R] /Limits [(0.4.19.7.3) (0.4.22.2)] >> endobj 512 0 obj << /Names [(0.4.23.2) 146 0 R 
(0.4.24.2) 150 0 R (0.5.1) 154 0 R (0.5.25.2) 158 0 R (0.5.26.2) 162 0 R (0:0) 171 0 R] /Limits [(0.4.23.2) (0:0)] >> endobj 513 0 obj << /Names [(0:100) 267 0 R (0:101) 268 0 R (0:103) 269 0 R (0:104) 270 0 R (0:105) 271 0 R (0:107) 277 0 R] /Limits [(0:100) (0:107)] >> endobj 514 0 obj << /Names [(0:108) 278 0 R (0:109) 279 0 R (0:110) 244 0 R (0:111) 280 0 R (0:112) 281 0 R (0:113) 282 0 R] /Limits [(0:108) (0:113)] >> endobj 515 0 obj << /Names [(0:114) 283 0 R (0:116) 284 0 R (0:118) 285 0 R (0:12) 174 0 R (0:121) 286 0 R (0:122) 287 0 R] /Limits [(0:114) (0:122)] >> endobj 516 0 obj << /Names [(0:124) 288 0 R (0:125) 295 0 R (0:126) 296 0 R (0:129) 298 0 R (0:130) 299 0 R (0:132) 300 0 R] /Limits [(0:124) (0:132)] >> endobj 517 0 obj << /Names [(0:134) 301 0 R (0:135) 302 0 R (0:136) 303 0 R (0:137) 304 0 R (0:138) 305 0 R (0:139) 306 0 R] /Limits [(0:134) (0:139)] >> endobj 518 0 obj << /Names [(0:14) 175 0 R (0:140) 307 0 R (0:141) 308 0 R (0:143) 309 0 R (0:144) 310 0 R (0:145) 311 0 R] /Limits [(0:14) (0:145)] >> endobj 519 0 obj << /Names [(0:146) 312 0 R (0:147) 313 0 R (0:148) 314 0 R (0:149) 315 0 R (0:15) 176 0 R (0:152) 316 0 R] /Limits [(0:146) (0:152)] >> endobj 520 0 obj << /Names [(0:153) 317 0 R (0:155) 318 0 R (0:156) 319 0 R (0:157) 320 0 R (0:158) 327 0 R (0:159) 328 0 R] /Limits [(0:153) (0:159)] >> endobj 521 0 obj << /Names [(0:160) 329 0 R (0:161) 330 0 R (0:162) 331 0 R (0:163) 332 0 R (0:164) 333 0 R (0:165) 334 0 R] /Limits [(0:160) (0:165)] >> endobj 522 0 obj << /Names [(0:166) 335 0 R (0:168) 336 0 R (0:169) 337 0 R (0:17) 177 0 R (0:170) 338 0 R (0:171) 339 0 R] /Limits [(0:166) (0:171)] >> endobj 523 0 obj << /Names [(0:172) 340 0 R (0:173) 341 0 R (0:174) 342 0 R (0:175) 343 0 R (0:176) 344 0 R (0:177) 345 0 R] /Limits [(0:172) (0:177)] >> endobj 524 0 obj << /Names [(0:178) 346 0 R (0:179) 347 0 R (0:18) 178 0 R (0:180) 348 0 R (0:181) 349 0 R (0:183) 355 0 R] /Limits [(0:178) (0:183)] >> endobj 525 0 obj << /Names [(0:185) 356 
0 R (0:186) 357 0 R (0:187) 358 0 R (0:188) 359 0 R (0:189) 360 0 R (0:19) 180 0 R] /Limits [(0:185) (0:19)] >> endobj 526 0 obj << /Names [(0:194) 361 0 R (0:196) 362 0 R (0:197) 363 0 R (0:198) 364 0 R (0:20) 182 0 R (0:200) 365 0 R] /Limits [(0:194) (0:200)] >> endobj 527 0 obj << /Names [(0:203) 366 0 R (0:205) 367 0 R (0:207) 368 0 R (0:208) 369 0 R (0:209) 370 0 R (0:210) 371 0 R] /Limits [(0:203) (0:210)] >> endobj 528 0 obj << /Names [(0:211) 378 0 R (0:213) 379 0 R (0:214) 380 0 R (0:215) 381 0 R (0:217) 382 0 R (0:218) 383 0 R] /Limits [(0:211) (0:218)] >> endobj 529 0 obj << /Names [(0:22) 183 0 R (0:220) 384 0 R (0:221) 385 0 R (0:222) 386 0 R (0:223) 387 0 R (0:224) 388 0 R] /Limits [(0:22) (0:224)] >> endobj 530 0 obj << /Names [(0:225) 389 0 R (0:229) 397 0 R (0:23) 184 0 R (0:230) 398 0 R (0:231) 399 0 R (0:233) 400 0 R] /Limits [(0:225) (0:233)] >> endobj 531 0 obj << /Names [(0:238) 402 0 R (0:239) 403 0 R (0:24) 185 0 R (0:240) 404 0 R (0:241) 405 0 R (0:244) 406 0 R] /Limits [(0:238) (0:244)] >> endobj 532 0 obj << /Names [(0:245) 407 0 R (0:247) 416 0 R (0:248) 417 0 R (0:25) 186 0 R (0:250) 418 0 R (0:251) 419 0 R] /Limits [(0:245) (0:251)] >> endobj 533 0 obj << /Names [(0:252) 420 0 R (0:254) 421 0 R (0:258) 422 0 R (0:259) 423 0 R (0:26) 187 0 R (0:262) 424 0 R] /Limits [(0:252) (0:262)] >> endobj 534 0 obj << /Names [(0:263) 425 0 R (0:264) 426 0 R (0:265) 427 0 R (0:266) 428 0 R (0:267) 429 0 R (0:268) 430 0 R] /Limits [(0:263) (0:268)] >> endobj 535 0 obj << /Names [(0:269) 435 0 R (0:270) 436 0 R (0:271) 437 0 R (0:272) 438 0 R (0:273) 439 0 R (0:274) 440 0 R] /Limits [(0:269) (0:274)] >> endobj 536 0 obj << /Names [(0:275) 441 0 R (0:276) 442 0 R (0:278) 450 0 R (0:28) 195 0 R (0:280) 451 0 R (0:281) 452 0 R] /Limits [(0:275) (0:281)] >> endobj 537 0 obj << /Names [(0:282) 453 0 R (0:283) 454 0 R (0:284) 455 0 R (0:286) 456 0 R (0:287) 457 0 R (0:288) 458 0 R] /Limits [(0:282) (0:288)] >> endobj 538 0 obj << /Names [(0:289) 459 0 R 
(0:29) 196 0 R (0:290) 460 0 R (0:291) 461 0 R (0:292) 462 0 R (0:293) 463 0 R] /Limits [(0:289) (0:293)] >> endobj 539 0 obj << /Names [(0:294) 464 0 R (0:295) 465 0 R (0:296) 466 0 R (0:297) 467 0 R (0:298) 468 0 R (0:299) 469 0 R] /Limits [(0:294) (0:299)] >> endobj 540 0 obj << /Names [(0:30) 197 0 R (0:300) 470 0 R (0:304) 471 0 R (0:305) 472 0 R (0:306) 473 0 R (0:307) 474 0 R] /Limits [(0:30) (0:307)] >> endobj 541 0 obj << /Names [(0:31) 198 0 R (0:310) 476 0 R (0:311) 477 0 R (0:312) 478 0 R (0:313) 479 0 R (0:33) 199 0 R] /Limits [(0:31) (0:33)] >> endobj 542 0 obj << /Names [(0:35) 200 0 R (0:36) 201 0 R (0:37) 202 0 R (0:38) 203 0 R (0:39) 204 0 R (0:40) 205 0 R] /Limits [(0:35) (0:40)] >> endobj 543 0 obj << /Names [(0:41) 206 0 R (0:42) 207 0 R (0:44) 208 0 R (0:45) 209 0 R (0:46) 210 0 R (0:47) 211 0 R] /Limits [(0:41) (0:47)] >> endobj 544 0 obj << /Names [(0:48) 212 0 R (0:49) 213 0 R (0:50) 214 0 R (0:51) 215 0 R (0:52) 216 0 R (0:53) 223 0 R] /Limits [(0:48) (0:53)] >> endobj 545 0 obj << /Names [(0:55) 224 0 R (0:56) 225 0 R (0:57) 226 0 R (0:59) 227 0 R (0:60) 228 0 R (0:62) 229 0 R] /Limits [(0:55) (0:62)] >> endobj 546 0 obj << /Names [(0:63) 230 0 R (0:64) 231 0 R (0:66) 232 0 R (0:67) 233 0 R (0:68) 234 0 R (0:70) 235 0 R] /Limits [(0:63) (0:70)] >> endobj 547 0 obj << /Names [(0:71) 236 0 R (0:72) 237 0 R (0:73) 238 0 R (0:75) 245 0 R (0:76) 246 0 R (0:77) 247 0 R] /Limits [(0:71) (0:77)] >> endobj 548 0 obj << /Names [(0:78) 248 0 R (0:79) 249 0 R (0:80) 250 0 R (0:81) 251 0 R (0:83) 252 0 R (0:84) 253 0 R] /Limits [(0:78) (0:84)] >> endobj 549 0 obj << /Names [(0:85) 254 0 R (0:86) 255 0 R (0:88) 256 0 R (0:91) 258 0 R (0:92) 259 0 R (0:93) 260 0 R] /Limits [(0:85) (0:93)] >> endobj 550 0 obj << /Names [(0:94) 261 0 R (0:95) 262 0 R (0:96) 263 0 R (0:97) 264 0 R (0:98) 265 0 R (0:99) 266 0 R] /Limits [(0:94) (0:99)] >> endobj 551 0 obj << /Names [(0:delta_creation) 257 0 R (0:format_preunpacked) 409 0 R (0:format_unzipped) 408 0 R 
(0:gcc_transition) 475 0 R (0:getting_worse) 321 0 R (0:long_time) 372 0 R] /Limits [(0:delta_creation) (0:long_time)] >> endobj 552 0 obj << /Names [(0:no_incremental) 297 0 R (0:no_indexes) 272 0 R (0:predictor) 390 0 R (0:repo_howto) 218 0 R (0:the_enemy_within) 401 0 R (Doc-Start) 170 0 R] /Limits [(0:no_incremental) (Doc-Start)] >> endobj 553 0 obj << /Names [(page.1) 169 0 R (page.10) 396 0 R (page.11) 415 0 R (page.12) 434 0 R (page.13) 449 0 R (page.14) 484 0 R] /Limits [(page.1) (page.14)] >> endobj 554 0 obj << /Names [(page.2) 194 0 R (page.3) 222 0 R (page.4) 243 0 R (page.5) 276 0 R (page.6) 294 0 R (page.7) 326 0 R] /Limits [(page.2) (page.7)] >> endobj 555 0 obj << /Names [(page.8) 354 0 R (page.9) 377 0 R] /Limits [(page.8) (page.9)] >> endobj 556 0 obj << /Kids [506 0 R 507 0 R 508 0 R 509 0 R 510 0 R 511 0 R] /Limits [(0.1.1) (0.4.22.2)] >> endobj 557 0 obj << /Kids [512 0 R 513 0 R 514 0 R 515 0 R 516 0 R 517 0 R] /Limits [(0.4.23.2) (0:139)] >> endobj 558 0 obj << /Kids [518 0 R 519 0 R 520 0 R 521 0 R 522 0 R 523 0 R] /Limits [(0:14) (0:177)] >> endobj 559 0 obj << /Kids [524 0 R 525 0 R 526 0 R 527 0 R 528 0 R 529 0 R] /Limits [(0:178) (0:224)] >> endobj 560 0 obj << /Kids [530 0 R 531 0 R 532 0 R 533 0 R 534 0 R 535 0 R] /Limits [(0:225) (0:274)] >> endobj 561 0 obj << /Kids [536 0 R 537 0 R 538 0 R 539 0 R 540 0 R 541 0 R] /Limits [(0:275) (0:33)] >> endobj 562 0 obj << /Kids [542 0 R 543 0 R 544 0 R 545 0 R 546 0 R 547 0 R] /Limits [(0:35) (0:77)] >> endobj 563 0 obj << /Kids [548 0 R 549 0 R 550 0 R 551 0 R 552 0 R 553 0 R] /Limits [(0:78) (page.14)] >> endobj 564 0 obj << /Kids [554 0 R 555 0 R] /Limits [(page.2) (page.9)] >> endobj 565 0 obj << /Kids [556 0 R 557 0 R 558 0 R 559 0 R 560 0 R 561 0 R] /Limits [(0.1.1) (0:33)] >> endobj 566 0 obj << /Kids [562 0 R 563 0 R 564 0 R] /Limits [(0:35) (page.9)] >> endobj 567 0 obj << /Kids [565 0 R 566 0 R] /Limits [(0.1.1) (page.9)] >> endobj 568 0 obj << /Dests 567 0 R >> endobj 569 0 obj << 
/Type /Catalog /Pages 504 0 R /Outlines 505 0 R /Names 568 0 R /PageMode/UseOutlines/PageLabels<>1<>2<>3<>4<>5<>6<>7<>8<>9<>10<>11<>12<>13<>]>> /OpenAction 165 0 R >> endobj 570 0 obj << /Author()/Title()/Subject()/Creator(LaTeX with hyperref package)/Producer(pdfTeX-1.40.10)/Keywords() /CreationDate (D:20110510233812+02'00') /ModDate (D:20110510233812+02'00') /Trapped /False /PTEX.Fullbanner (This is pdfTeX, Version 3.1415926-1.40.10-2.2 (TeX Live 2009/Debian) kpathsea version 5.0.0) >> endobj xref 0 571 0000000000 65535 f 0000000015 00000 n 0000005150 00000 n 0000140847 00000 n 0000000056 00000 n 0000000085 00000 n 0000005332 00000 n 0000140777 00000 n 0000000128 00000 n 0000000159 00000 n 0000005635 00000 n 0000140693 00000 n 0000000202 00000 n 0000000234 00000 n 0000008241 00000 n 0000140607 00000 n 0000000278 00000 n 0000000311 00000 n 0000008609 00000 n 0000140521 00000 n 0000000355 00000 n 0000000394 00000 n 0000009158 00000 n 0000140448 00000 n 0000000438 00000 n 0000000473 00000 n 0000011577 00000 n 0000140324 00000 n 0000000515 00000 n 0000000544 00000 n 0000011822 00000 n 0000140250 00000 n 0000000588 00000 n 0000000631 00000 n 0000012005 00000 n 0000140176 00000 n 0000000675 00000 n 0000000719 00000 n 0000012249 00000 n 0000140051 00000 n 0000000761 00000 n 0000000806 00000 n 0000012493 00000 n 0000139977 00000 n 0000000850 00000 n 0000000887 00000 n 0000015067 00000 n 0000139890 00000 n 0000000931 00000 n 0000000964 00000 n 0000015558 00000 n 0000139803 00000 n 0000001009 00000 n 0000001057 00000 n 0000015987 00000 n 0000139716 00000 n 0000001102 00000 n 0000001149 00000 n 0000016727 00000 n 0000139629 00000 n 0000001194 00000 n 0000001228 00000 n 0000019047 00000 n 0000139542 00000 n 0000001273 00000 n 0000001323 00000 n 0000019535 00000 n 0000139418 00000 n 0000001368 00000 n 0000001399 00000 n 0000019657 00000 n 0000139344 00000 n 0000001446 00000 n 0000001494 00000 n 0000019838 00000 n 0000139270 00000 n 0000001541 00000 n 0000001599 00000 n 
0000022932 00000 n 0000139146 00000 n 0000001644 00000 n 0000001689 00000 n 0000023114 00000 n 0000139085 00000 n 0000001736 00000 n 0000001808 00000 n 0000024221 00000 n 0000138974 00000 n 0000001853 00000 n 0000001893 00000 n 0000024402 00000 n 0000138900 00000 n 0000001940 00000 n 0000001986 00000 n 0000027236 00000 n 0000138826 00000 n 0000002033 00000 n 0000002090 00000 n 0000030618 00000 n 0000138697 00000 n 0000002132 00000 n 0000002186 00000 n 0000030740 00000 n 0000138619 00000 n 0000002232 00000 n 0000002271 00000 n 0000031172 00000 n 0000138527 00000 n 0000002317 00000 n 0000002361 00000 n 0000031541 00000 n 0000138396 00000 n 0000002407 00000 n 0000002437 00000 n 0000031664 00000 n 0000138317 00000 n 0000002485 00000 n 0000002532 00000 n 0000034579 00000 n 0000138224 00000 n 0000002580 00000 n 0000002618 00000 n 0000034824 00000 n 0000138131 00000 n 0000002666 00000 n 0000002707 00000 n 0000038031 00000 n 0000138038 00000 n 0000002755 00000 n 0000002807 00000 n 0000038276 00000 n 0000137959 00000 n 0000002856 00000 n 0000002899 00000 n 0000038460 00000 n 0000137867 00000 n 0000002945 00000 n 0000003009 00000 n 0000038825 00000 n 0000137775 00000 n 0000003055 00000 n 0000003098 00000 n 0000041816 00000 n 0000137683 00000 n 0000003144 00000 n 0000003178 00000 n 0000042184 00000 n 0000137591 00000 n 0000003224 00000 n 0000003264 00000 n 0000042427 00000 n 0000137513 00000 n 0000003310 00000 n 0000003353 00000 n 0000047463 00000 n 0000137396 00000 n 0000003396 00000 n 0000003423 00000 n 0000047586 00000 n 0000137317 00000 n 0000003469 00000 n 0000003503 00000 n 0000048936 00000 n 0000137238 00000 n 0000003549 00000 n 0000003598 00000 n 0000004787 00000 n 0000005999 00000 n 0000003650 00000 n 0000004907 00000 n 0000004967 00000 n 0000005028 00000 n 0000135899 00000 n 0000136415 00000 n 0000005089 00000 n 0000005210 00000 n 0000005271 00000 n 0000005391 00000 n 0000005452 00000 n 0000136241 00000 n 0000005513 00000 n 0000135728 00000 n 0000005574 00000 n 
0000005696 00000 n 0000005757 00000 n 0000005818 00000 n 0000005878 00000 n 0000005939 00000 n 0000136588 00000 n 0000136766 00000 n 0000008028 00000 n 0000009772 00000 n 0000007888 00000 n 0000006123 00000 n 0000008181 00000 n 0000008302 00000 n 0000008363 00000 n 0000008424 00000 n 0000008486 00000 n 0000008548 00000 n 0000008670 00000 n 0000008731 00000 n 0000008792 00000 n 0000008854 00000 n 0000008915 00000 n 0000008977 00000 n 0000009036 00000 n 0000009097 00000 n 0000009219 00000 n 0000009280 00000 n 0000009341 00000 n 0000009403 00000 n 0000009465 00000 n 0000009527 00000 n 0000009589 00000 n 0000009650 00000 n 0000009711 00000 n 0000136070 00000 n 0000024160 00000 n 0000012798 00000 n 0000011336 00000 n 0000009909 00000 n 0000011456 00000 n 0000011516 00000 n 0000011638 00000 n 0000011699 00000 n 0000011761 00000 n 0000011883 00000 n 0000011944 00000 n 0000012066 00000 n 0000012127 00000 n 0000012188 00000 n 0000012310 00000 n 0000012370 00000 n 0000012432 00000 n 0000012553 00000 n 0000012614 00000 n 0000012675 00000 n 0000012737 00000 n 0000014793 00000 n 0000016971 00000 n 0000014653 00000 n 0000012922 00000 n 0000014947 00000 n 0000015007 00000 n 0000015128 00000 n 0000015189 00000 n 0000015251 00000 n 0000015312 00000 n 0000015374 00000 n 0000015435 00000 n 0000015497 00000 n 0000015619 00000 n 0000015680 00000 n 0000015742 00000 n 0000015804 00000 n 0000015865 00000 n 0000015926 00000 n 0000016048 00000 n 0000016109 00000 n 0000016171 00000 n 0000016233 00000 n 0000016295 00000 n 0000016357 00000 n 0000016419 00000 n 0000016480 00000 n 0000016542 00000 n 0000016604 00000 n 0000016666 00000 n 0000016788 00000 n 0000016849 00000 n 0000016910 00000 n 0000019777 00000 n 0000020083 00000 n 0000018867 00000 n 0000017082 00000 n 0000018987 00000 n 0000019108 00000 n 0000019169 00000 n 0000019231 00000 n 0000019292 00000 n 0000019350 00000 n 0000019412 00000 n 0000019474 00000 n 0000019596 00000 n 0000019716 00000 n 0000019899 00000 n 0000019960 00000 n 
0000020022 00000 n 0000022371 00000 n 0000022530 00000 n 0000024648 00000 n 0000022223 00000 n 0000020194 00000 n 0000022687 00000 n 0000022747 00000 n 0000022809 00000 n 0000022871 00000 n 0000022992 00000 n 0000023053 00000 n 0000023175 00000 n 0000023236 00000 n 0000023298 00000 n 0000023360 00000 n 0000023422 00000 n 0000023484 00000 n 0000023546 00000 n 0000023608 00000 n 0000023669 00000 n 0000023731 00000 n 0000023792 00000 n 0000023854 00000 n 0000023915 00000 n 0000023977 00000 n 0000024038 00000 n 0000024099 00000 n 0000024281 00000 n 0000024341 00000 n 0000024463 00000 n 0000024524 00000 n 0000024586 00000 n 0000048875 00000 n 0000030404 00000 n 0000028153 00000 n 0000026503 00000 n 0000024759 00000 n 0000026623 00000 n 0000026683 00000 n 0000026745 00000 n 0000026806 00000 n 0000026868 00000 n 0000026929 00000 n 0000026991 00000 n 0000027052 00000 n 0000027114 00000 n 0000027176 00000 n 0000027296 00000 n 0000027356 00000 n 0000027418 00000 n 0000027478 00000 n 0000027540 00000 n 0000027602 00000 n 0000027663 00000 n 0000027725 00000 n 0000027787 00000 n 0000027848 00000 n 0000027910 00000 n 0000027972 00000 n 0000028032 00000 n 0000028092 00000 n 0000136883 00000 n 0000031970 00000 n 0000030264 00000 n 0000028277 00000 n 0000030558 00000 n 0000030679 00000 n 0000030802 00000 n 0000030863 00000 n 0000030925 00000 n 0000030987 00000 n 0000031049 00000 n 0000031111 00000 n 0000031234 00000 n 0000031295 00000 n 0000031357 00000 n 0000031419 00000 n 0000031480 00000 n 0000031603 00000 n 0000031726 00000 n 0000031787 00000 n 0000031848 00000 n 0000031909 00000 n 0000038766 00000 n 0000034304 00000 n 0000035440 00000 n 0000034164 00000 n 0000032094 00000 n 0000034458 00000 n 0000034518 00000 n 0000034641 00000 n 0000034702 00000 n 0000034763 00000 n 0000034886 00000 n 0000034947 00000 n 0000035009 00000 n 0000035071 00000 n 0000035133 00000 n 0000035194 00000 n 0000035256 00000 n 0000035317 00000 n 0000035379 00000 n 0000037649 00000 n 0000037809 00000 n 
0000039008 00000 n 0000037501 00000 n 0000035564 00000 n 0000037971 00000 n 0000038093 00000 n 0000038154 00000 n 0000038215 00000 n 0000038338 00000 n 0000038399 00000 n 0000038521 00000 n 0000038582 00000 n 0000038643 00000 n 0000038705 00000 n 0000038886 00000 n 0000038946 00000 n 0000042124 00000 n 0000042366 00000 n 0000041311 00000 n 0000041471 00000 n 0000042918 00000 n 0000041163 00000 n 0000039106 00000 n 0000041634 00000 n 0000041694 00000 n 0000041755 00000 n 0000041878 00000 n 0000041939 00000 n 0000042000 00000 n 0000042062 00000 n 0000042245 00000 n 0000042304 00000 n 0000042488 00000 n 0000042549 00000 n 0000042611 00000 n 0000042672 00000 n 0000042734 00000 n 0000042795 00000 n 0000042856 00000 n 0000044918 00000 n 0000044254 00000 n 0000043042 00000 n 0000044374 00000 n 0000044434 00000 n 0000044495 00000 n 0000044556 00000 n 0000044617 00000 n 0000044678 00000 n 0000044737 00000 n 0000044796 00000 n 0000044857 00000 n 0000046935 00000 n 0000047089 00000 n 0000047243 00000 n 0000049550 00000 n 0000046779 00000 n 0000045016 00000 n 0000047403 00000 n 0000047525 00000 n 0000047647 00000 n 0000047708 00000 n 0000047769 00000 n 0000047831 00000 n 0000047892 00000 n 0000047953 00000 n 0000048014 00000 n 0000048075 00000 n 0000048137 00000 n 0000048198 00000 n 0000048260 00000 n 0000048321 00000 n 0000048383 00000 n 0000048444 00000 n 0000048506 00000 n 0000048567 00000 n 0000048629 00000 n 0000048690 00000 n 0000048752 00000 n 0000048813 00000 n 0000048997 00000 n 0000049058 00000 n 0000049120 00000 n 0000049181 00000 n 0000049243 00000 n 0000049304 00000 n 0000049366 00000 n 0000049426 00000 n 0000049488 00000 n 0000137000 00000 n 0000050619 00000 n 0000050439 00000 n 0000049648 00000 n 0000050559 00000 n 0000135202 00000 n 0000050730 00000 n 0000051082 00000 n 0000051457 00000 n 0000051840 00000 n 0000052152 00000 n 0000052719 00000 n 0000053263 00000 n 0000071280 00000 n 0000071842 00000 n 0000083278 00000 n 0000083718 00000 n 0000090393 00000 n 
0000090732 00000 n 0000099871 00000 n 0000100165 00000 n 0000120263 00000 n 0000120856 00000 n 0000134769 00000 n 0000137085 00000 n 0000137163 00000 n 0000140955 00000 n 0000141113 00000 n 0000141274 00000 n 0000141450 00000 n 0000141627 00000 n 0000141805 00000 n 0000141991 00000 n 0000142156 00000 n 0000142310 00000 n 0000142464 00000 n 0000142617 00000 n 0000142771 00000 n 0000142925 00000 n 0000143077 00000 n 0000143230 00000 n 0000143384 00000 n 0000143538 00000 n 0000143691 00000 n 0000143845 00000 n 0000143998 00000 n 0000144150 00000 n 0000144303 00000 n 0000144457 00000 n 0000144611 00000 n 0000144763 00000 n 0000144916 00000 n 0000145069 00000 n 0000145222 00000 n 0000145375 00000 n 0000145529 00000 n 0000145683 00000 n 0000145836 00000 n 0000145990 00000 n 0000146143 00000 n 0000146297 00000 n 0000146449 00000 n 0000146599 00000 n 0000146745 00000 n 0000146891 00000 n 0000147037 00000 n 0000147183 00000 n 0000147329 00000 n 0000147475 00000 n 0000147621 00000 n 0000147767 00000 n 0000147913 00000 n 0000148149 00000 n 0000148366 00000 n 0000148534 00000 n 0000148696 00000 n 0000148790 00000 n 0000148898 00000 n 0000149006 00000 n 0000149110 00000 n 0000149215 00000 n 0000149320 00000 n 0000149424 00000 n 0000149527 00000 n 0000149633 00000 n 0000149708 00000 n 0000149812 00000 n 0000149893 00000 n 0000149967 00000 n 0000150005 00000 n 0000150304 00000 n trailer << /Size 571 /Root 569 0 R /Info 570 0 R /ID [<6F8F82A8E9E5302EFCF10B2E006DB2E3> <6F8F82A8E9E5302EFCF10B2E006DB2E3>] >> startxref 150630 %%EOF debdelta/doc/html/0000755000000000000000000000000012436652150011225 5ustar debdelta/doc/html/x277.html0000644000000000000000000001055412436652150012627 0ustar Todo

5. Todo

5.1. todo list

  1. Prepare an APT method so that 'apt-get upgrade' would actually use deltas. Some code is already written. See also 2011 Google Summer of Code.

  2. As in Section 4.3.4. It would be nice if debdelta-upgrade would actually choose if

    • download a delta and use it to create the .deb

    • download the deb

    depending on which one would be faster. Unfortunately, this decision must depend on a good model to predict the speed of patching... and this I still cannot achieve.

  3. in debdelta-upgrade, have as many "patching thread" as there are cores

  4. upgrade debdelta-upgrade to newer libapt

  5. support multiarch

  6. collect data, benchmark! (some debdelta behaviours are coded in magic numbers that I got from thumb reasoning on small datasets)

  7. support long time exact recovery Section 4.5: embed a copy of gzip, libzip, bzip2 and lzma in debdelta??

5.2. things are getting worse

W.r.t. when I started deploying debdelta, things got worse, for two reasons:

  1. one problem is Section 4.4

  2. delta backends are bad at compressing a binary that was compiled from the same source but with two different compilers; see in particular the Google Courgette project, and compare it with the problems I encountered lately when Debian switched from GCC 4.4 to 4.5, when it happened that the binaries were so different that the compression of the new binary with LZMA would be smaller than the BSDIFF of the old and the new binary (!!). Unfortunately it seems that Google Courgette was hit with a patent infringement.

so we should study how to reduce the size of deltas, and/or making them faster (possibly implementing lzma in xdelta3; or automatically choosing 'bsdiff' vs 'xdelta3' depending on the situation).

debdelta/doc/html/x65.html0000644000000000000000000003753512436652150012552 0ustar debdelta-upgrade service

3. debdelta-upgrade service

In June 2006 I set up a delta-upgrading framework, so that people may upgrade their Debian box using debdelta-upgrade (that downloads package 'deltas'). This section is an introduction to the framework that is behind 'debdelta-upgrade', and is also used by 'cupt'. In the following, I will simplify (in places, quite a lot).

3.1. The framework

The framework is so organized: I keep up some servers where I use the program 'debdeltas' to create all the deltas; whereas endusers use the client 'debdelta-upgrade' to download the deltas and apply them to produce the debs needed to upgrade their boxes. In my server, I mirror some repositories, and then I invoke 'debdeltas' to make the deltas between them. I use the scripts /usr/share/debdelta/debmirror-delta-security and /usr/share/debdelta/debmirror-marshal-deltas for this. This generates any delta that may be needed for upgrades in squeeze,squeeze-security,wheezy,sid,experimental, for architectures i386 and amd64 (as of Mar 2011); the generated repository of deltas is more or less 10GB.

3.2. The goals

There are two ultimate goals in designing this framework:

  1. SMALL) reduce the size of downloads (fit for people that pay-by-megabyte);

  2. FAST) speed up the upgrade.

The two goals are unfortunately only marginally compatible. An example: bsdiff can produce very small deltas, but is quite slow (in particular with very large files); so currently (2009 on) I use 'xdelta3' as the backend diffing tool for 'debdeltas' in my server. Another example is in debs that contain archives (.gz, .tar.gz, etc.): I have methods and code to peek inside them, so the deltas become smaller, but the applying gets slower.

3.3. The repository structure

The repository of deltas is just a HTTP archive; it is similar to the pool of packages; that is, if foobar_1_all.deb is stored in pool/main/f/foobar/ in the repository of debs, then the delta to upgrade it will be stored in pool/main/f/foobar/foobar_1_2_all.debdelta in the repository of deltas. Contrary to the repository of debs, a repository of deltas has no indexes, see Section 3.7.2. The delta repository is in http://debdeltas.debian.net/debian-deltas.

3.4. The repository creation

Suppose that the unstable archive, on 1st Mar, contains foobar_1_all.deb (and it is in pool/main/f/foobar/ ) ; then on 2nd Mar, foobar_2_all.deb is uploaded; but this has a flaw (e.g. FTBFS) and so on 3rd Mar foobar_3_all.deb is uploaded. On 2nd Mar, the delta server generates pool/main/f/foobar/foobar_1_2_all.debdelta. On 3rd Mar, the server generates both pool/main/f/foobar/foobar_1_3_all.debdelta and pool/main/f/foobar/foobar_2_3_all.debdelta. So, if the end-user Ann upgrades the system on both 2nd and 3rd Mar, then she uses both foobar_1_2_all.debdelta (on 2nd) and foobar_2_3_all.debdelta (on 3rd Mar). If the end-user Boe has not upgraded the system on 2nd Mar, and he upgrades on 3rd Mar, then on 3rd Mar he uses foobar_1_3_all.debdelta.

3.5. size limit

Note that currently the server rejects deltas that exceed 70% of the deb size: indeed the size gain would be too small, and the time would be wasted, if you sum the time to download the delta and the time to apply it (OK, these are run as much as possible in parallel, yet ....).

Also, the server does not generate delta for packages that are smaller than 10KB.

3.6. /etc/debdelta/sources.conf

Consider a package that is currently installed. It is characterized by name installed_version architecture (unfortunately there is no way to tell from which archive it came from, but this does not seem to be a problem currently) Suppose now that a newer version is available somewhere in an archive, and that the user wishes to upgrade to that version. The archive Release file contain these info: "Origin , Label , Site, Archive". (Note that Archive is called Suite in the Release file). Example for the security archive:


	Origin=Debian
	Label=Debian-Security
	Archive=stable
	Site=security.debian.org
      
The file /etc/debdelta/sources.conf , given the above info, determines the host that should contain the delta for upgrading the package. This information is called "delta_uri" in that file. The complete URL for the delta is built adding to the delta_uri a directory path that mimics the "pool" structure used in Debian archives, and appending to it a filename of the form name_oldversion_newversion_architecture.debdelta. All this is implemented in the example script contrib/findurl.py . If the delta is not available at that URL, and name_oldversion_newversion_architecture.debdelta-too-big is available, then the delta is too big to be useful. If neither is present, then, either the delta has not yet been generated, or it will never be generated... but this is difficult to know.

3.7. indexes

3.7.1. indexes of debs in APT

Let's start examining the situation for debs and APT. Using indexes for debs is a no-brainer decision: indeed, the client (i.e. the end user) does not know the list of available debs in the server, and, even knowing the current list, cannot foresee the future changes. So indexes provide needed informations: the packages' descriptions, versions, dependencies, etc etc; these info are used by apt and the other frontends.

3.7.2. no indexes of deltas in debdelta

If you then think of deltas, you realize that all requirements above fall. Firstly there is no description and no dependencies for deltas. [1] Of course 'debdelta-upgrade' needs some information to determine if a delta exists, and to download it; but these information are already available:


	      the name of the package P
	      the old version  O
	      the new version  N
	      the architecture A
	    
Once these are known, the URL of the file F can be algorithmically determined as URI/POOL/P_O_N_A.debdelta where URI is determined from /etc/debdelta/sources.conf and POOL is the directory in the pool of the package P . This algorithm is also implemented (quite verbosely) in contrib/findurl.py in the sources of debdelta. This is the reason why currently there is no "index of deltas", and nonetheless 'debdelta-upgrade' works fine (and "cupt" as well). Adding an index of file would only increase downloads (time and size) and increase disk usage; with negligeable benefit, if any.

3.8. no incremental deltas

Let me add another point that may be unclear. There are no incremental deltas (and IMHO never will be).

3.8.1. What "incremental" would be, and why it is not

Please recall Section 3.4. What does not happen currently is what follows: on 3rd Mar , Boe decides to upgrade, and invokes 'debdelta-upgrade'; then 'debdelta-upgrade' finds foobar_1_2_all.debdelta and foobar_2_3_all.debdelta , it uses the foremost to generate foobar_2_all.deb, and in turn it uses this and the second delta to generate foobar_3_all.deb . This is not implemented, and it will not, for the following reasons.

  • The delta size is, on average, 40% of the size of the deb (and this is getting worse, for different reasons, see Section 5.2); so two deltas are 80% of the target deb, and this is too much.

  • It takes time to apply a delta; applying two deltas to produce one deb takes too much time.

  • The server does generate the direct delta foobar_1_3_all.debdelta :-) so why making things complex when they are easy? :-)

  • Note also that incremental deltas would need some index system to be implemented... indeed, Boe would have no way to know on 3rd Mar that the intermediate version of foobar between "1" and "3" is "2"; but since incremental deltas do not exist, then there is no need to have indexes).

3.9. Repository howto

There are (at least) two ways to manage a repository, and run a server that creates the deltas

3.9.1. debmirror --debmarshal

The first way is what I currently use. It is implemented in the script /usr/share/debdelta/debmirror-marshal-deltas (a simpler version, much primitive but more readable , is /usr/share/debdelta/debmirror-delta-security) Currently I use the complex script that creates deltas for amd64 and i386, and for lenny squeeze sid experimental ; and the simpler one for lenny-security. Let me start outlining how the simple script generate deltas . It is a 3 steps process. Lets say that $secdebmir is the directory containg the mirror of the repository security.debian.org.

  1. 
	--- 1st step
    	#make copy of current stable-security lists of packages
    	olddists=${TMPDIR:-/tmp}/oldsecdists-`date +'%F_%H-%M-%S'`
    	mkdir $olddists
    	cp -a $secdebmir/dists $olddists
          
  2. --- 2nd step call 'debmirror' to update the mirror ; note that I apply a patch to debmirror so that old debs are not deleted , but moved to a /old_deb directory

  3. --- 3rd step call 'debdeltas' to generate deltas , from the state of packages in $olddists to the current state in $secdebmir , and also wrt what is in stable. Note that, for any package that was deleted from the archive, then 'debdeltas' will go fishing for it inside /old_deb .

The more complex script uses the new debmirror --debmarshal so it keeps 40 old snapshots of the deb archives, and it generates deltas of the current package version (the "new" version) to the versions in snapshots -10,-20,-30,-40.

3.9.2. hooks and repository of old_debs

I wrote the skeleton for some commands.

debdelta_repo [--add name version arch filename disttoken]

This first one is to be called by the archive management tool (e.g. DAK) when a new package enters in a part of the archive (lets say, package="foobar" version="2" arch="all" and filename="pool/main/f/foobar/foobar_2_all.deb" just entered disttoken="testing/main/amd64"). That command will add that to a delta queue, so appropriate deltas will be generated; this command returns almost immediately.

debdelta_repo [--delta]

This does create all the deltas.

debdelta_repo [--sos filename]

This will be called by DAK when (before) it does delete a package from the archive; this command will save that old deb somewhere (indeed it may be needed to generate deltas sometimes in the future). (It will be up to some piece of debdelta_repo code to manage the repository of old debs, and delete excess copies).

TODO: this skeleton does not handle 'security', where some old versions of the packages are in a different DISTTOKEN.

Notes

[1]

deltas have a "info" section, but that is, as to say, standalone

debdelta/doc/html/x182.html0000644000000000000000000004630412436652150012624 0ustar Goals, tricks, ideas and issues

4. Goals, tricks, ideas and issues

4.1. exact patching

When debpatch or debdelta-upgrade recreates a .deb, it will be identical to the desired one (so it may be possible to check it using the security features in APT [1]). See though Section 4.5.

4.2. exact recompression

Suppose a .deb has inside a huge file /usr/share/doc/foobar/document.info.gz and this starts with a RCS tag ... then each time it is released, the file will be different even though just few bytes were changed. Another examples are manpages that start with the header containing the version of the command. So , to get good compression of the difference, I had to be able to gunzip those files, diff them, and gzip back them exactly identical (but possibly for headers [2]) For this reason, I studied gzip formats, and I wrote in debdelta some python code that does the trick (90% of the times...). [3]

4.3. speed

4.3.1. some (old) numbers

Warning: this section is referred to experiments done in 2006, and the backend for delta encoding was 'xdelta'. On a desktop with CPU Athlon64 3000 and a average hard disk,


	$ debdelta mozilla-browser_1.7.8-1sarge3_i386.deb \
	mozilla-browser_1.7.8-1sarge6_i386.deb /tmp/m-b.debdelta
      
processes the 10Mb of mozilla-browser in ~11sec, that is a speed of ~900kB per second. Then debpatch applies the above delta in 16sec, at a speed of ~600kB per second. Numbers drop in a old PC, or in a notebook (like mine, that has a Athlon 1600MHz and slow disks), where data are chewed at ~200kB per second. Still, since I have a ADSL line that downloads at max 80kB per second, I have a benefit downloading deltas. In a theoretical example, indeed, to download a 80MB package, it would take 1000seconds; whereas to download a delta that is 20% of 80MB it takes 200seconds, and then 80MB / (200kB/sec) = 400seconds to apply it, for a total of 600seconds. So I may get a "virtual speed" of 80MB / 600sec = 130kB/sec . Note that delta downloading and delta patching is done in parallel: if 4 packages as above have to be downloaded, then the total time for downloading of full debs would be 4000seconds, while the time for parallel-download-patch-apply-patch may be as low as 1400seconds.

This is a real example of running 'debdelta-upgrade' :


	Looking for a delta for libc6 from 2.3.6-9 to 2.3.6-11
	Looking for a delta for udev from 0.092-2 to 0.093-1
	Patching done, time: 22sec, speed: 204kB/sec, result: libc6_2.3.6-11_i386.deb
	Patching done, time: 4sec, speed: 57kB/sec, result: udev_0.093-1_i386.deb
	Delta-upgrade download time 28sec speed 21.6k/sec
	total time: 53sec; virtual speed: 93.9k/sec.
      
(Note that the "virtual speed" of 93.9k/sec , while less than the 130kB/sec of the theoretical example above, is still more than the 80kB that my ADSL line would allow). Of course the above is even better for people with fast disks and/or slow modems. Actually, an apt delta method may do a smart decision of how many deltas to download, and in which order, to optimize the result, (given the deltas size, the packages size, the downloading speed and the patching speed).

4.3.2. speeding up

The problem is that the process of applying a delta to create a new deb is currently slow, even on very fast machines. One way to overcome is to "parallelize as much as possible". The best strategy that I can imagine is to keep both the CPU, the hard disk, and the Internet connection, always maxed up. This is why 'debdelta-upgrade' has two threads, the "downloading thread" and the "patching thread". The downloading thread downloads deltas (ordered by increasing size), and as soon as they are downloaded, it queues them to be applied in the "patching thread"; whereas as soon as all available deltas are downloaded it starts downloading some debs, and goes on for as long as the deltas are being applied in the "patching thread". Summarizing, the downloading thread keeps Internet busy while the patching thread keeps the CPU and HDD busy.

Another speedup strategy is embedded inside the deltas themselves: since bsdiff is a memory hog, when the backend is bsdiff, I have to divide the data in chunks; this may lower the compression ratio, but the good point is that the HDD accesses and the calls to bsdiff can run "in parallel". With newer xdelta3, xdelta3 can read the original data from a pipe, so the data are not divided in chunks, but rather continously piped into xdelta3; so xdelta3 runs at the same time as when the data are read from HDD.

4.3.3. the 10kb trick

currently, roughly half of the generated deltas[4] are less than 10KB. debdelta-upgrade downloads deltas in two passes,

  1. in the first pass it tries to download the first 10KB of a delta; if it gets a complete delta, it immediately pipes it into the "patching thread queue", otherwise if it gets only a partial download, it adds it to the download queue; if it gets HTTP404, it possibly checks for the "toobig" timestamp, and it possibly warns the user.

  2. in the second pass, it downloads the rest of the deltas, and queues them for patching

Why this complex method? because the first 10KBs of a delta contain the info, and those may be used to actually decide not to download the rest of the delta (if a TODO predictor decides that it is not worthwhile...Section 4.3.4).

4.3.4. the choice, the predictor

Which deltas should be downloaded, VS which debs? Currently there is a rule-of-thumb: the server immediately deletes any delta that exceeds 70% of the original deb , and it replaces it with an empty file ending in ".debdelta-too-big". In such cases, "debdelta-upgrade" will download the deb instead. See the explanation of "debdelta-upgrade --deb-policy" in the man page for more info and customization on which debs get downloaded.

Some time ago I tried to devise a better way to understand when to download a delta w.r.t. a deb. The code is in the "Predictor" class .... but I could not reliably predict the final speed of patching, so currently it is not used.

4.3.5. State of the art

All in all, I still cannot obtain high speeds: so people that have a fast ADSL Internet connection usually are better off downloading all the debs, and ignoring "debdelta-upgrade" altogether. Anyway, the best way to know is to try "debdelta-upgrade -v" and read the final statistics. See Section 4.7 and Section 4.8 for recent developments.

4.4. better deb compression is a worse delta

'xdelta3' can reconstruct data at high speed: on nowadays processors, it can process up to 2MB per second; but, when applying a delta, 'xdelta3' works on uncompressed data. So if the data is then compressed at a ratio 1/3, then the resulting speed on compressed data is 700KB/sec. Moreover, time is needed to actually compress the data.

In recent years, 'dpkg' has transitioned from 'data.tar.gz' to 'data.tar.bz2' to 'data.tar.lzma'; each method is better at compressing, but is also slower than the previous one; since it is better at compressing, it also defeats the ability of 'debdelta' to produce small deltas (wrt the original deb, of course), and indeed statistics show that deltas are getting larger; since it is slower, it slows down the applying of deltas as well.

4.5. long time recovery

As aforementioned, deltas can rebuild the deb identically to the byte. But the patch.sh script calls the standard tools 'tail','head','gzip','bzip2','lzma', etc etc to rebuild a delta; so if the argument calling or output of any of those tools changes, then a delta may become unusable. As long as deltas are used for the debdelta-upgrade service, this is no big deal: if such a tool changes, then we can adjust the deltas to it, and there is just some days' disruption of the service [5] (and people will download debs instead of deltas .... as we used to).

If anybody wants instead to use debdelta to archive debs for long time, (as the archive.debian.org service was doing), then we should make sure that , at any moment in future, deltas can be applied. A possible solution would be that deltas should contain, in the info files, the versions of all tools that are needed for applying. A second solution is that debdelta should keep a standard set of those tools inside the package.

4.6. streaming

Let me summarize. When 'debdelta-upgrade' (or 'debpatch') recreates a deb, one step is reassembling the data.tar part inside it; this part moreover is compressed (gzip, bzip2 or lately lzma). This 'reassembling and compressing' takes time (both for CPU and for HD), and is moreover quite useless, since, in short time, 'apt' will call 'dpkg -i' that decompresses and reopens the data.tar in the deb.

It is then reasonable to collapse this two parts, and this would possibly speed up the upgrade a bit. A first step is '--format=unzipped' Section 4.7 , a next step may be '--format=preunpacked' Section 4.8.

4.7. --format=unzipped

The recently introduced new --format=unzipped may speed up package upgrades. If you call 'debdelta-upgrade' with the option '--format=unzipped' , then in the recreated deb the data.tar part will not be compressed. This may speedup the 'debdelta-upgrade' + 'apt-get upgrade' process. Indeed, writing to hard disk is fast (let's say 5MB/sec, but usually much more); whereas compressing random data with 'bzip2 -9' or 'lzma -9' is much slower (let's say 2.0MB/sec and 1.5 MB/sec) ; and moreover the compressed data is then decompressed by dpkg when installing; so avoiding the compress/decompress should be a win/win (unless you run out of disk space...). Indeed I see that the creation of deltas is much faster; but I still do not have enough data collected....

4.8. --format=preunpacked

Here is another idea. When 'debdelta-upgrade' is called in upgrading a package 'foobar' it currently creates 'foobar_2.deb'. By an appropriate cmdline switch '--format=preunpacked', instead of creating a 'foobar_2.deb' , it directly saves all of its file to the filesystem, and it adds an extension to all the file names, making sure that no file name conflicts (=overwrite) with a preexisting file on the filesystem ; then it creates a file 'foobar_2.deb_preunpacked' , that is a deb package were 'data.tar.xxx' is replaced with 'data_list', just a text file specifying the contents of 'data.tar.xxx' and where regular files were temporarily unpacked.

Note that the above idea overlaps a lot with the SummerOfCode2010 StreamingPackageInstall

debdelta-upgrade --format=preunpacked is now implemented as a proof-of-concept (it does not really write temporary files to HD yet). The format of data_list is


Files:
 TYPE MODE USER GROUP MTIME
 NAME_FILE_WAS_UNPACKED_TO (if regular file)
 ORIGINAL_FILENAME
 LINK_NAME (if link)
[repeat]
       
Example of data_list

Files:
 d 0755 root root 1304626623
 
 ./etc
 
 - 0644 root root 1304626594
 /./etc/gnashrc_1956_debdelta_preunpacked
 ./etc/gnashrc
 l 0777 root root 1304626629
 
 ./usr/share/man/man1/gtk-gnash.1.gz
 gnash.1.gz
       

PROS: (1) may be faster; (2) if you need to upgrade a 100MB package, you do not need to save both the deb and (while 'dpkg --unpack') the whole new deb data : so there is less risk of running out of disk space.

CONS: (1) you cannot install that "preunpacked deb" twice (so dpkg should probably remove it once it has installed it); (2) you cannot move it to another host; (3) when "apt-get clean", all temporary files have to be removed as well.

So it may be a good idea to use ".deb_preunpacked" as extension for them. And I would recommend using '--format=unzipped' for essential packages such as the kernel.

If you like the idea, someone should help in changing 'dpkg' so that it would be able to install starting from 'foobar_2.deb_preunpacked'. And change APT so that it would interact with 'debdelta' to create the 'foobar_2.deb_unpacked' files, and pass them to dpkg (and clean them properly).

Notes

[1]

note though that debdelta-upgrade saves the reconstructed debs in /var/cache/apt/archives, and APT does not check them there, AFAICT

[2]

the re-gzipped files are identical but for headers, (indeed gzip headers contain sometimes a timestamp ); but this is not a problem since the reconstructed gzipped file is then piped again into 'xdelta3' or 'bsdiff' to rebuild the 'data.tar', so the header is fixed at that stage

[3]

This is implemented in the python routine delta_gzipped_files.

[4]

that is, discarding those that are more than 70% of the corresponding deb

[5]

this actually already happened some years ago, with libzip

debdelta/doc/html/index.html0000644000000000000000000002250712436652150013230 0ustar The debdelta suite

The debdelta suite

Andrea C. G. Mennucci

debdelta is an application suite designed to compute changes between Debian packages. These changes (that we will call 'deltas') are similar to the output of the "diff" program in that they may be used to store and transmit only the changes between Debian packages. This suite contains 'debdelta-upgrade', that downloads deltas and use them to create all Debian packages needed for an 'apt-get upgrade'.


1. Overview

The debdelta application suite is really composed of different applications.

1.1. debdelta

debdelta computes the delta, that is, a file that encodes the difference between two Debian packages. Example:


	$ a=/var/cache/apt/archives 
	$ debdelta -v $a/emacs-snapshot-common_1%3a20060512-1_all.deb \
	$a/emacs-snapshot-common_1%3a20060518-1_all.deb /tmp/emacs.debdelta
      
the result is: deb delta is 12.5% of deb ; that is, 15452kB would be saved

1.2. debpatch

debpatch can use the delta file and a copy of the old Debian package to recreate the new Debian package. (This process is called "applying the delta file"). If the old Debian package is not available, but is installed in the host, it can use the installed data; in this case, '/' is used in lieu of the old .deb.

Example:


	$ debpatch -A  /tmp/emacs.debdelta / /tmp/emacs.deb
      

1.3. debdeltas

debdeltas can be used to generate deltas for many debs at once. It will generate delta files with names such as package_old-version_new-version_architecture.debdelta. If the delta exceeds ~70% of the deb, 'debdeltas' will delete it and leave a stamp of the form package_old-version_new-version_architecture.debdelta-too-big. Example usages are in the man page; see also Section 3.9.

1.4. debdelta-upgrade

debdelta-upgrade will download necessary deltas and apply them to create debs for a successive apt-get upgrade. The deltas are available for upgrades in 'stable' , 'stable-security' , 'testing', 'unstable' and 'experimental', for i386 and amd64. Example usage:


	# apt-get update && debdelta-upgrade && apt-get upgrade
      
If run by a non-root user, debs are saved in /tmp/archives : do not forget to move them in /var/cache/apt/archives

debdelta-upgrade will also download .debs for which no delta is available (this is done in parallel to patching, to maximize speed). See the explanation of "debdelta-upgrade --deb-policy" in the man page for more informations and customization on which debs get downloaded.

More informations are in next sections.

1.5. debforensic

There is also another bunch of code (that though was never distributed.... it is available in the GIT repo). debforensics creates and uses sqlite databases containing information regarding debian binary packages. debforensics --add will scan debian packages and add the list of files (and SHA1 hashes of them) to the database. debforensics --scan will check a file against multiple databases, to see if that file is part of any package. debforensics --forensic will scan a filesystem and list files that are part of a package, and files that are not (or are misplaced, or have strange permissions....).

If debdelta-upgrade fails to apply a delta, and '-d' is passed, then a debug file is generated, and then debforensic may be used to understand what went wrong (theoretically).

Important

Beware: a full database for main/amd64 is ~350MBs, without indexes. So in practice currently I cannot keep a database in my host.

debdelta/doc/html/x54.html0000644000000000000000000000472312436652150012541 0ustar a delta

2. a delta

The delta is 'ar' archive (see 'man ar'). The delta contains 'info', some data members (named by numbers), a script named 'patch.sh.xxx', and optional gpg signatures. The script recreates the new deb. See do_delta_() in the python code for more details.

2.1. the info in a delta

a delta first 'ar' member is always named 'info', and is a text file containing some keywords and informations regarding the delta itself. [TODO add details]

2.2. how to apply a delta

TODO WRITEME. You may look into /usr/share/debdelta/debpatch.sh to understand the basics.

debdelta/debdelta.10000644000000000000000000000753012436652141011347 0ustar .TH DEBDELTA "1" "aug 2009" "debdelta" "User Commands" .SH NAME debdelta \- compute changes between Debian packages .SH SYNOPSIS .B debdelta [\fIOPTION\fR]... \fIFROMFILE\fR \fITOFILE\fR \fIDELTA .SH DESCRIPTION This program computes changes between Debian packages FROMFILE and TOFILE. These changes are stored in the file DELTA. The DELTA file may later be used by the "debpatch" program to recreate TOFILE from FROMFILE. In a sense, the pair of programs "debdelta"/"debpatch" operate on Debian packages similar to how the pair "diff"/"patch" operate on text files; indeed, the output of "debdelta" may be used to store and transmit only the changes between Debian packages; but, contrary to the "diff" output, the output of "debdelta" is not human readable, but rather it is intended to be as small as possible (to optimize for less disk usage and/or faster transmission). .SH OPTIONS .TP \fB\-\-signing\-key\fR \fIKEY\fR key used to sign the delta (using GnuPG). .TP \fB\-\-no\-md5\fR do not include MD5 info in delta. .TP \fB\-\-needsold\fR create a patch that can only be used if the old .deb is available. .TP \fB\-\-delta-algo\fR \fIALGO\fR use a specific backend for computing binary diffs; possible values are: xdelta xdelta-bzip xdelta3 bsdiff .TP \fB\-M \fIMb\fR maximum memory to use (for 'bsdiff' or 'xdelta'). .TP \fB\-v\fR verbose (can be added multiple times). .TP \fB\-d\fR print full traceback on Python errors; save useful info in temporary files in case that a backend crashes. (If '-d' is added multiple times, it also adds to the patches other extra debugging checks: only for advanced bug tracking). .TP \fB\-k\fR keep temporary files (use for debugging). .TP .BI \--gpg-home specify a different home for GnuPG, default for root is .I /etc/debdelta/gnupg while for other users is unset. See .I --homedir in .BR gpg(1) for details. .TP .BI \--disable-feature \ FEATURE disable an internal feature. 
See the documentation in README.features. .SH "EXAMPLE" The command .br .I debdelta libglib_2.12_all.deb libglib_2.14_all.deb /tmp/glib.debdelta .br will express the difference between two versions of libglib in /tmp/glib.debdelta ; the command .br .I debpatch \-A /tmp/glib.debdelta libglib_2.12_all.deb /tmp/glib.deb .br will create in /tmp/glib.deb a perfect copy of libglib_2.14_all.deb. .SH SECURITY A Debian package that is recreated using debpatch is byte-by-byte identical to the original one; so the cryptographic archive authentication support for APT (see .BR apt-secure (8) ) may be used to assert that it can be trusted to be installed. Moreover, if a GnuPG key is specified using .BI \--signing-key then the delta file itself is cryptograhically protected using GnuPG, MD5 and SHA1 (using the same method as .BR dpkg-sig (1) ). Note that, for non-root users, the master debdelta keyring .I /usr/share/keyrings/debian-debdelta-archive-keyring.gpg is added to the list of keyrings for gnupg. .SH EXIT STATUS .TP 0 if OK, .TP 1 if there was a retriable problem (e.g., out-of-disk-space, or an auxiliary program such 'lzma' is missing and the user is asked to install the package 'lzma'), .TP 2 if there was a non-retriable problem, .TP 3 if there was an error in a command\-line option, or in a configuration file, .TP 4 if there is an internal error, and .TP 5 if exited due to keyboard interrupt. .SH "REPORTING BUGS" Report bugs to . .SH AUTHORS Debdelta was written and is copyright \(co 2006-09 Andrea Mennucci. .br This man page was written by Jegou Pierre-yves . .SH COPYING This is free software. You may redistribute copies of it under the terms of the GNU Library General Public License . There is NO WARRANTY, to the extent permitted by law. .SH "SEE ALSO" .BR debdelta-upgrade (1), .BR debpatch (1), .BR debdeltas (1), .BR /usr/share/doc/debdelta/README. 
debdelta/BUGS0000644000000000000000000000000012436652141010165 0ustar debdelta/debpatch.10000644000000000000000000000404612436652141011354 0ustar .TH DEBPATCH "1" "aug 2009" "debpatch" "User Commands" .SH NAME debpatch \- Applies a patch to recreate a debian package. .SH SYNOPSIS .B debpatch [\fIOPTION\fR]... \fIDELTA \fIFROMFILE\fR \fITOFILE\fR .br .B debpatch [\fIOPTION\fR]... \fIDELTA \fI/\fR \fITOFILE\fR .br .B debpatch \fB\-\-info\fR \fIPATCH\fR .SH DESCRIPTION This program is designed to apply a patch (DELTA) to a Debian package (FROMFILE) and create the Debian package (TOFILE). If the Debian package (FROMFILE) was installed in the host system and the .deb file is not available, then the argument (FROMFILE) may be substituted by the keyword / : then debpatch will get the needed data from the host filesystem. .SH OPTIONS .TP \fB\-\-info\fR \fIPATCH Write info on \fIPATCH .TP \fB\-\-no\-md5 do not verify MD5 info. .TP \fB\-v verbose (can be added multiple times). .TP \fB\-d print full traceback on Python errors; save useful info in temporary files in case that a delta fails. .TP \fB\-k keep temporary files (use for debugging). .TP \fB\-\-format \fIFORMAT format of recreated debs. FORMAT=deb is the usual, FORMAT=unzipped means that the data.tar part is not compressed, (and this may save some time) .TP .B \-A \--accept-unsigned accept unsigned deltas. .TP .BI \--gpg-home specify a different home for GnuPG, default for root is .I /etc/debdelta/gnupg while for other users is unset. See .I --homedir in .BR gpg(1) for details. .SH EXAMPLES See .BR debdelta (1) .SH SECURITY See .BR debdelta (1) .SH EXIT STATUS See .BR debdelta (1) .SH "REPORTING BUGS" Report bugs to . .SH AUTHORS Debpatch was written and is copyright \(co 2006-09 Andrea Mennucci. .br This man page was written by Jegou Pierre-yves . .SH COPYING This is free software. You may redistribute copies of it under the terms of the GNU Library General Public License . 
There is NO WARRANTY, to the extent permitted by law. .SH "SEE ALSO" .BR debdelta-upgrade (1), .BR debdelta (1), .BR /usr/share/doc/debdelta/README . debdelta/better_progress-2.diff0000644000000000000000000001237712436652141013730 0ustar --- debdelta 2010/09/25 13:40:08 1.274 +++ debdelta 2010/09/25 18:20:44 @@ -1203,6 +1203,8 @@ if TD[-1] != '/': TD = TD + '/' + import thread, threading + HAVE_PRELINK=os.path.exists('/usr/sbin/prelink') HAVE_LOCALEPURGE=os.path.exists('/etc/locale.nopurge') or os.path.exists('/usr/sbin/localepurge') @@ -1232,6 +1234,8 @@ os.unlink(temp_err_name) runtime['patchprogress']=1 + if do_progress: + sys.stderr.write('P %2d%% %s\r' % (1, newdebshortname)) is_signed=False for a in ar_list_delta: @@ -1248,6 +1252,8 @@ print _("WARNING, delta is not signed:")+' '+delta runtime['patchprogress']=2 + if do_progress: + sys.stderr.write('P %2d%% %s\r' % (2, newdebshortname)) _info_patch_unzip_(TD) @@ -1263,6 +1269,8 @@ params=info_2_db(info) runtime['patchprogress']=3 + if do_progress: + sys.stderr.write('P %2d%% %s\r' % (3, newdebshortname)) #this is not needed in preparing the patch, but may help in forensic conf_files=[] @@ -1290,6 +1298,8 @@ raise DebDeltaError('Old deb size is '+str(olddebsize)+' instead of '+params['OLD/Size']) runtime['patchprogress']=4 + if do_progress: + sys.stderr.write('P %2d%% %s\r' % (4, newdebshortname)) if DEBUG > 1 : #this is currently disabled, since 'dpkg -s' is vey slow (~ 1.6 sec) @@ -1316,6 +1326,8 @@ '\nin old/installed deb, '+a+' = ' +dpkg_params[a]) runtime['patchprogress']=5 + if do_progress: + sys.stderr.write('P %2d%% %s\r' % (5, newdebshortname)) ### some auxiliary routines, separated to make code more readable @@ -1457,6 +1469,8 @@ ###see into parameters: the patch may need extra info and data runtime['patchprogress']=6 + if do_progress: + sys.stderr.write('P %2d%% %s\r' % (6, newdebshortname)) prelink_time=None @@ -1519,33 +1533,61 @@ raise DebDeltaError('localepurge removed '+str(len(localepurged))+' 
files.') runtime['patchprogress']=12 + if do_progress: + sys.stderr.write('P %2d%% %s\r' % (12, newdebshortname)) a='' if DEBUG: a='-v' script_time = - time.time() + + newfile=os.path.join(TD,'NEW.file') + newdebsize=None + if 'NEW/Size' in params: + newdebsize = int(params['NEW/Size']) + progresschar=0.0 + progresslen=float(os.path.getsize(os.path.join(TD,'PATCH/patch.sh'))) + if 'update_progress_exit' in runtime: + del runtime['update_progress_exit'] + + def update_progress(runtime): + while sys and time and runtime and 'update_progress_exit' not in runtime: + if os.path.exists(newfile) and newdebsize : + progress=(int(12.0 + 42.0 * progresschar / progresslen + \ + 42.0 * float(os.path.getsize(newfile)) / float(newdebsize))) + else: + progress=(int(12.0 + 42.0 * progresschar / progresslen )) + runtime['patchprogress']=progress + if sys and do_progress: + sys.stderr.write('P %2d%% %s\r' % (progress, newdebshortname,)) + time.sleep(0.05) + + progress_thread=threading.Thread(target=update_progress,args=(runtime,)) + progress_thread.daemon=True + progress_thread.start() + temp_err_name_fd, temp_err_name = tempfile.mkstemp(prefix='debdeltaE') temp_name_fd, temp_name = tempfile.mkstemp(prefix='debdeltaO') F=subprocess.Popen([SHELL,'-v','-e','PATCH/patch.sh'], cwd=TD, stderr=subprocess.PIPE, stdout=temp_name_fd) - progresschar=0.0 - progresslen=float(os.path.getsize(os.path.join(TD,'PATCH/patch.sh'))) + + for j in F.stderr: os.write(temp_err_name_fd, j) progresschar+=len(j) - progress=(int(12.0 + 84.0 * progresschar / progresslen)) - runtime['patchprogress']=progress - if do_progress: - sys.stderr.write('P %2d%% %s\r' % (progress, newdebshortname)) + F.wait() - if do_progress and terminalcolumns: #clean up - sys.stderr.write(' ' * (terminalcolumns-2) +'\r') ret=F.returncode os.close(temp_err_name_fd) os.close(temp_name_fd) + runtime['update_progress_exit']=True + script_time += time.time() + runtime['patchprogress']=97 + if do_progress: + sys.stderr.write('P %2d%% %s\r' 
% (97, newdebshortname)) #helper for debugging def tempos(): @@ -1595,11 +1637,9 @@ raise DebDeltaError('error in patch.sh.') #then we check for the conformance - if 'NEW/Size' in params: - newdebsize = os.stat(TD+'NEW.file')[ST_SIZE] - if newdebsize != int(params['NEW/Size']): - fore() - raise DebDeltaError('new deb size is '+str(newdebsize)+' instead of '+params['NEW/Size']) + if newdebsize != None and newdebsize != os.path.getsize(newfile): + fore() + raise DebDeltaError('new deb size is '+str(os.path.getsize(newfile))+' instead of '+str(newdebsize)) if DO_MD5: if 'NEW/MD5sum' in params: @@ -1615,14 +1655,19 @@ os.unlink(temp_name) os.unlink(temp_err_name) - runtime['patchprogress']=99 + runtime['patchprogress']=98 + if do_progress: + sys.stderr.write('P %2d%% %s\r' % (98, newdebshortname)) if newdeb: - shutil.move(TD+'NEW.file',newdeb) - + shutil.move(newfile,newdeb) + end_sec = time.time() elaps=(end_sec - start_sec) + if do_progress and terminalcolumns: #clean up + sys.stderr.write(' ' * (terminalcolumns-2) +'\r') + if VERBOSE : if newdeb: debsize = os.stat(newdeb)[ST_SIZE] debdelta/minigzip.c0000644000000000000000000001657712436652141011526 0ustar /* minigzip.c -- simulate gzip using the zlib compression library * Copyright (C) 1995-2002 Jean-loup Gailly. This software is provided 'as-is', without any express or implied warranty. In no event will the authors be held liable for any damages arising from the use of this software. Permission is granted to anyone to use this software for any purpose, including commercial applications, and to alter it and redistribute it freely, subject to the following restrictions: 1. The origin of this software must not be misrepresented; you must not claim that you wrote the original software. If you use this software in a product, an acknowledgment in the product documentation would be appreciated but is not required. 2. 
Altered source versions must be plainly marked as such, and must not be misrepresented as being the original software. 3. This notice may not be removed or altered from any source distribution. */ /* Mennucci 2006: simplified Mennucci 2008: if -DBZIP, it becomes a minibzip2 TODO in minibzip2 mode, it does not check and report errors. */ /* * minigzip is a minimal implementation of the gzip utility. This is * only an example of using zlib and isn't meant to replace the * full-featured gzip. No attempt is made to deal with file systems * limiting names to 14 or 8+3 characters, etc... Error checking is * very limited. So use minigzip only for testing; use gzip for the * real thing. On MSDOS, use only on file names without extension * or in pipe mode. */ #include #ifdef BZIP #include #ifndef OF /* function prototypes */ # ifdef STDC # define OF(args) args # else # define OF(args) () # endif #endif #define uInt int #else /* BZIP */ #include "zlib.h" #endif /* BZIP */ # include # include # include #ifdef BZIP # define Z_SUFFIX ".bz2" # define SUFFIX_LEN 4 # define ZFILE BZFILE * #else # define Z_SUFFIX ".gz" # define SUFFIX_LEN 3 # define ZFILE gzFile #endif #define BUFLEN 16384 #define MAX_NAME_LEN 1024 #ifdef MAXSEG_64K # define local static /* Needed for systems with limitation on stack size. */ #else # define local #endif char *prog; void error OF((const char *msg)); void gz_compress OF((FILE *in, ZFILE out)); void gz_uncompress OF((ZFILE in, FILE *out)); void file_compress OF((char *file, char *mode)); void file_uncompress OF((char *file)); int main OF((int argc, char *argv[])); /* =========================================================================== * Display error message and exit */ void error(msg) const char *msg; { fprintf(stderr, "%s: %s\n", prog, msg); exit(1); } /* =========================================================================== * Compress input to output then close both files. 
*/ void gz_compress(FILE *in, ZFILE out) { local char buf[BUFLEN]; int len; int err; for (;;) { len = (int)fread(buf, 1, sizeof(buf), in); if (ferror(in)) { perror("fread"); exit(1); } if (len == 0) break; #ifdef BZIP BZ2_bzwrite(out, buf, len); #else if (gzwrite(out, buf, (unsigned)len) != len) error(gzerror(out, &err)); #endif } fclose(in); #ifdef BZIP BZ2_bzclose(out); #else if (gzclose(out) != Z_OK) error("failed gzclose"); #endif } /* =========================================================================== * Uncompress input to output then close both files. */ void gz_uncompress(in, out) ZFILE in; FILE *out; { local char buf[BUFLEN]; int len; int err; for (;;) { #ifdef BZIP len = BZ2_bzread(in,buf,sizeof(buf)); #else len = gzread(in, buf, sizeof(buf)); if (len < 0) error (gzerror(in, &err)); #endif if (len == 0) break; if ((int)fwrite(buf, 1, (unsigned)len, out) != len) { error("failed fwrite"); } } if (fclose(out)) error("failed fclose"); #ifdef BZIP BZ2_bzclose(in); #else if (gzclose(in) != Z_OK) error("failed gzclose"); #endif } /* =========================================================================== * Compress the given file: create a corresponding .gz file and remove the * original. */ void file_compress(file, mode) char *file; char *mode; { local char outfile[MAX_NAME_LEN]; FILE *in; ZFILE out; strcpy(outfile, file); strcat(outfile, Z_SUFFIX); in = fopen(file, "rb"); if (in == NULL) { perror(file); exit(1); } #ifdef BZIP out = BZ2_bzopen(outfile, mode); #else out = gzopen(outfile, mode); #endif if (out == NULL) { fprintf(stderr, "%s: can't gzopen %s\n", prog, outfile); exit(1); } gz_compress(in, out); unlink(file); } /* =========================================================================== * Uncompress the given file and remove the original. 
*/ void file_uncompress(file) char *file; { local char buf[MAX_NAME_LEN]; char *infile, *outfile; FILE *out; ZFILE in; uInt len = (uInt)strlen(file); strcpy(buf, file); if (len > SUFFIX_LEN && strcmp(file+len-SUFFIX_LEN, Z_SUFFIX) == 0) { infile = file; outfile = buf; outfile[len-3] = '\0'; } else { outfile = file; infile = buf; strcat(infile, Z_SUFFIX); } #ifdef BZIP in = BZ2_bzopen(infile, "rb"); #else in = gzopen(infile, "rb"); #endif if (in == NULL) { fprintf(stderr, "%s: can't gzopen %s\n", prog, infile); exit(1); } out = fopen(outfile, "wb"); if (out == NULL) { perror(file); exit(1); } gz_uncompress(in, out); unlink(infile); } /* =========================================================================== * Usage: minigzip [-d] [-f] [-h] [-r] [-1 to -9] [files...] * -d : decompress * -f : compress with Z_FILTERED * -h : compress with Z_HUFFMAN_ONLY * -r : compress with Z_RLE * -1 to -9 : compression level */ int main(argc, argv) int argc; char *argv[]; { int uncompr = 0; ZFILE file; char outmode[20]; strcpy(outmode, "wb6 "); prog = argv[0]; argc--, argv++; while (argc > 0) { if (strcmp(*argv, "-d") == 0) uncompr = 1; #ifndef BZIP else if (strcmp(*argv, "-f") == 0) outmode[3] = 'f'; else if (strcmp(*argv, "-h") == 0) outmode[3] = 'h'; else if (strcmp(*argv, "-r") == 0) outmode[3] = 'R'; #endif else if ((*argv)[0] == '-' && (*argv)[1] >= '1' && (*argv)[1] <= '9' && (*argv)[2] == 0) outmode[2] = (*argv)[1]; else break; argc--, argv++; } if (argc == 0) { if (uncompr) { #ifdef BZIP file = BZ2_bzdopen(fileno(stdin), "r"); #else file = gzdopen(fileno(stdin), "rb"); #endif if (file == NULL) error("can't gzdopen stdin"); gz_uncompress(file, stdout); } else { #ifdef BZIP file = BZ2_bzdopen(fileno(stdout), outmode); #else file = gzdopen(fileno(stdout), outmode); #endif if (file == NULL) error("can't gzdopen stdout"); gz_compress(stdin, file); } } else { do { if (uncompr) { file_uncompress(*argv); } else { file_compress(*argv, outmode); } } while (argv++, --argc); } 
return 0; } debdelta/debian/0000755000000000000000000000000012436652141010736 5ustar debdelta/debian/postinst0000644000000000000000000000236712436652141012554 0ustar #!/bin/sh set -e umask 0077 GPG_MASTER_PUB_KEYRING="/usr/share/keyrings/debian-debdelta-archive-keyring.gpg" GPG_HOME="/etc/debdelta/gnupg" sha1it () { ( cd ${GPG_HOME} echo '#if this file is deleted or it does not match, then ' > sha1_hashes.txt echo '# these files will not be removed when purging "debdelta" ' >> sha1_hashes.txt sha1sum pubring.gpg secring.gpg >> sha1_hashes.txt if test -f trustdb.gpg ; then sha1sum trustdb.gpg >> sha1_hashes.txt ; fi ) } check1it () { ( cd ${GPG_HOME} test -f sha1_hashes.txt && sha1sum -c --quiet sha1_hashes.txt ) } case "$1" in configure|reconfigure) if test ! -r ${GPG_HOME} ; then echo "Debdelta: creating ${GPG_HOME}" mkdir ${GPG_HOME} fi if test ! -r ${GPG_HOME}/pubring.gpg -a \ ! -r ${GPG_HOME}/secring.gpg ; then echo "Debdelta: creating keyrings in ${GPG_HOME}" touch ${GPG_HOME}/secring.gpg ${GPG_HOME}/pubring.gpg sha1it else echo "Debdelta: updating public keyring in ${GPG_HOME}" fi c=0 ; if check1it ; then c=1 ; fi gpg --no-tty --batch --no-options --no-auto-check-trustdb --homedir ${GPG_HOME} --import ${GPG_MASTER_PUB_KEYRING} || true if test $c = 1 ; then sha1it ; fi ;; esac debdelta/debian/rules0000755000000000000000000000707012436652141012022 0ustar #!/usr/bin/make -f # Sample debian/rules file; from GNU Hello, Copyright 1994,1995 by Ian Jackson. 
package = debdelta D = debian/debdelta docdir = $(D)/usr/share/doc/$(package) mandir = $(D)/usr/share/man/man1/ D2 = debian/debdelta-doc docdir2 = $(D2)/usr/share/doc/$(package) docdir2doc = $(D2)/usr/share/doc/$(package)-doc CC = gcc INSTALL_PROGRAM = install CPPFLAGS:=$(shell dpkg-buildflags --get CPPFLAGS) CFLAGS:=$(shell dpkg-buildflags --get CFLAGS) CXXFLAGS:=$(shell dpkg-buildflags --get CXXFLAGS) LDFLAGS:=$(shell dpkg-buildflags --get LDFLAGS) ifeq (,$(findstring nostrip,$(DEB_BUILD_OPTIONS))) INSTALL_PROGRAM += -s endif minibzip2: minigzip.c $(CC) -DBZIP $(CFLAGS) $(CPPFLAGS) minigzip.c -o minibzip2 $(LDFLAGS) -lbz2 minigzip: minigzip.c $(CC) $(CFLAGS) $(CPPFLAGS) minigzip.c -o minigzip $(LDFLAGS) -lz build-arch: minibzip2 minigzip $(checkdir) touch build build-indep: build: build-arch build-indep clean: $(checkdir) rm -f build *~ */*~ debian/files* debian/substvars rm -f minigzip minibzip2 rm -rf $(D) $(D2) binary-indep: checkroot build $(checkdir) rm -rf $(D2) install -d $(D2)/DEBIAN $(docdir2) $(docdir2doc) cp doc/debdelta_suite.pdf debian/copyright debian/changelog $(docdir2doc) cd $(docdir2doc) && gzip -9 changelog cp -r doc/html $(docdir2doc) ln -s ../debdelta-doc/debdelta_suite.pdf $(docdir2) ln -s ../debdelta-doc/html $(docdir2) # build package dpkg-gencontrol -Pdebian/debdelta-doc -pdebdelta-doc chown -R root:root $(D2) dpkg-deb --build $(D2) .. 
binary-arch: checkroot build $(checkdir) rm -rf $(D) # dirs install -d $(D)/DEBIAN $(D)/usr/bin $(docdir) $(mandir) $(D)/usr/lib/debdelta $(D)/usr/share/debdelta $(D)/usr/share/keyrings $(D)/usr/share/locale/it/LC_MESSAGES $(D)/usr/share/locale/fr/LC_MESSAGES # control install -m 755 debian/postrm $(D)/DEBIAN/ install -m 755 debian/postinst $(D)/DEBIAN/ # /usr/share cp po/it.mo $(D)/usr/share/locale/it/LC_MESSAGES/debdelta.mo cp po/fr.mo $(D)/usr/share/locale/fr/LC_MESSAGES/debdelta.mo install debpatch.sh $(D)/usr/share/debdelta install contrib/debmirror-delta-security $(D)/usr/share/debdelta/ install contrib/debmirror-marshal-deltas $(D)/usr/share/debdelta/ install contrib/debmarshal_list_useless_debs $(D)/usr/share/debdelta/ install contrib/dpkg-sig $(D)/usr/share/debdelta cp contrib/debmirror*trash_option.patch $(D)/usr/share/debdelta ln -s ../../bin/debdelta $(D)/usr/share/debdelta/debpatch-url cp keyrings/pub.gpg $(D)/usr/share/keyrings/debian-debdelta-archive-keyring.gpg chmod 0644 $(D)/usr/share/keyrings/debian-debdelta-archive-keyring.gpg # /usr/lib $(INSTALL_PROGRAM) minigzip $(D)/usr/lib/debdelta/minigzip $(INSTALL_PROGRAM) minibzip2 $(D)/usr/lib/debdelta/minibzip2 # /etc install -d $(D)/etc/debdelta cp etc/sources.conf $(D)/etc/debdelta/sources.conf echo /etc/debdelta/sources.conf >> $(D)/DEBIAN/conffiles # /usr/bin install debdelta $(D)/usr/bin/debdelta ln -s debdelta $(D)/usr/bin/debpatch ln -s debdelta $(D)/usr/bin/debdeltas ln -s debdelta $(D)/usr/bin/debdelta-upgrade # /usr/share/doc cp FAQ README.features debian/copyright debian/changelog $(docdir) cd $(docdir) && gzip -9 changelog FAQ # man cp *.1 $(mandir) gzip -9 $(mandir)/*.1 # build package dpkg-shlibdeps $(D)/usr/lib/debdelta/minigzip $(D)/usr/lib/debdelta/minibzip2 dpkg-gencontrol -Pdebian/debdelta -pdebdelta chown -R root:root $(D) dpkg-deb --build $(D) .. 
define checkdir test -f debdelta -a -f debian/rules endef binary: binary-indep binary-arch checkroot: $(checkdir) test $$(id -u) = 0 .PHONY: binary binary-arch binary-indep clean checkroot debdelta/debian/control0000644000000000000000000000320212436652141012336 0ustar Source: debdelta Section: devel Priority: optional Build-Depends: zlib1g-dev, libbz2-dev Maintainer: A Mennucc1 Standards-Version: 3.9.6.0 Homepage: http://debdelta.debian.net Vcs-git: git://anonscm.debian.org/collab-maint/debdelta.git Package: debdelta Architecture: any Depends: python, bzip2, binutils, ${shlibs:Depends} Recommends: python-apt, xdelta3, xdelta, lzma, xz-utils, xdelta, bsdiff, gnupg2, gnupg-agent, python-debian Conflicts: xdelta3 (<< 0y.dfsg-1) Enhances: cupt Suggests: debdelta-doc Description: diff and patch utilities which work with Debian packages debdelta is a program suite designed to compute changes between Debian packages. These changes (deltas) are similar to the output of the "diff" program in that they may be used to store and transmit only the changes between Debian packages. . This suite contains 'debdelta-upgrade', that downloads deltas and use them to create all Debian packages needed for an 'apt-get upgrade'. It may be used in a Debian/unstable host (to speed up download of new packages), or in a Debian/stable host (for security updates). . Similarly, when this package is installed, the 'cupt' package manager will use deltas to upgrade the host. Package: debdelta-doc Section: doc Architecture: all Recommends: debdelta Description: diff and patch utilities which work with Debian packages debdelta is a program suite designed to compute changes between Debian packages. These changes (deltas) are similar to the output of the "diff" program in that they may be used to store and transmit only the changes between Debian packages. . This package contains the documentation. 
debdelta/debian/changelog0000644000000000000000000005256212436652141012622 0ustar debdelta (0.55) unstable; urgency=medium * update git location * add debian backports to sources.conf -- A Mennucc1 Sun, 30 Nov 2014 18:30:51 +0100 debdelta (0.54) unstable; urgency=medium * [INTL:pt] Portuguese translation, thanks to Américo Monteiro (Closes: #760731). * bump standards version to 3.9.6.0 * debian/rules : add build-arch, build-indep * add new keys into keyring * import some code from GIT server branch -- A Mennucc1 Sun, 30 Nov 2014 17:00:39 +0100 debdelta (0.53) experimental; urgency=low * merge changes uploaded in wheezy, and changelogs as well * Bug fix: "owned and unowned files after purge (policy 6.8 + 10.7.3)", thanks to Holger Levsen (Closes: #617481). -- A Mennucc1 Sun, 02 Dec 2012 18:52:15 +0100 debdelta (0.50+2) unstable; urgency=high * debdelta-upgrade: uses incorrect URL when requesting i386 debdeltas to upgrade amd64 (multiarch) host, thanks to Carlos Maddela (Closes: #692194). * debdelta-upgrade: packages whose version number contains a colon not upgraded when using a proxy, thanks to Carlos Maddela (Closes: #691641). 
-- A Mennucc1 Wed, 07 Nov 2012 13:31:31 +0100 debdelta (0.52) experimental; urgency=low * debpatch, debdelta-upgrade : do not get confused by broken symlinks * enable hardening flags -- A Mennucc1 Fri, 02 Nov 2012 10:08:46 +0100 debdelta (0.50+1) unstable; urgency=high * debpatch, debdelta-upgrade : do not get confused by broken symlinks * enable hardening flags -- A Mennucc1 Wed, 31 Oct 2012 10:40:46 +0100 debdelta (0.51) experimental; urgency=low * debdelta, debdeltas: XZ parameter autodetection, detect CRC choice and compression -- A Mennucc1 Wed, 12 Sep 2012 16:34:53 +0200 debdelta (0.50) unstable; urgency=medium * ship 2012 gpg public key (to verify deltas with debdelta-upgrade) * correct typo in program message * correct italian translation of program messages according to d-italian rules -- A Mennucc1 Sun, 26 Aug 2012 21:10:55 +0200 debdelta (0.49) unstable; urgency=low * update italian translation of program messages * update and really ship French translation of program messages, thanks to Thomas Blein (Closes: #678329). -- A Mennucc1 Thu, 02 Aug 2012 11:00:34 +0200 debdelta (0.48) unstable; urgency=low * French translation of program messages, thanks to Thomas Blein (Closes: #678329). -- A Mennucc1 Thu, 02 Aug 2012 07:24:35 +0200 debdelta (0.47) unstable; urgency=low * debdelta , debdeltas : autodetect (some) xz parameters for data.tar.xz * debdelta-upgrade: --forensic=http is the default -- A Mennucc1 Mon, 28 May 2012 17:34:56 +0200 debdelta (0.46) unstable; urgency=low * debdelta-upgrade: support multiarch -- A Mennucc1 Fri, 23 Mar 2012 20:56:56 +0100 debdelta (0.45) unstable; urgency=low * debdelta-upgrade/debpatch : new option --forensic, to report a log when a delta fails * debdelta/debdeltas : new option --forensicdir, to store hashes to check above reports * do not use hardcoded errno values, thanks to Pino Toscano (Closes: #640627). * typo in Recommends: xz -> xz-utils, thanks to Eugene V. Lyubimkin (Closes: #641189). 
-- A Mennucc1 Tue, 06 Dec 2011 17:27:16 +0100 debdelta (0.44) unstable; urgency=low * support xz compression for data.tar part in .deb (only when defult compression is used) * Bug fix: "FTBFS with binutils-gold", thanks to Angel Abad (Closes: #633467). * Bug fix: "Do not wait forever the connection to server", thanks to Rodolphe Pelloux-Prayer (Closes: #636022). -- A Mennucc1 Sun, 28 Aug 2011 10:17:40 +0200 debdelta (0.43) unstable; urgency=low * add code to support python-apt 0.8 API, thanks to Julian Andres Klode (Closes: #572059). * Add a fallback in case of upgrade errors, thanks to Nelson A. de Oliveira (Closes: #612423) and thanks to Ritesh Raj Sarraf (Closes: #592053). * New package 'debdelta-doc' contains the documentation. -- A Mennucc1 Sat, 28 May 2011 18:58:05 +0200 debdelta (0.42exp) experimental; urgency=low * Implemented streaming capabilities in patches; implemented new --format option for 'debdelta-upgrade', so that '--format=unzipped' will create debs with data.tar uncompressed * debdelta-upgrade bails out on a delta if locale files are missing, even if 'localepurge' is not used (see bug 619086) -- A Mennucc1 Wed, 04 May 2011 11:04:55 +0200 debdelta (0.41) unstable; urgency=low * Bump Standards-Version . * Review --help messages . * Small corrections to it.po . * Remove obsolete stuff from contrib/ . * Updated version of 'debmirror-delta-security' (for stable=squeeze). * New 'debmirror-marshal-deltas' script, that uses the new 'debmirror --debmarshal' option to manage the debs mirror and generate the deltas used by 'debdelta-upgrade'. * Add contrib/debmarshal_list_useless_debs (used by above). * Reviewed the README files. -- A Mennucc1 Mon, 07 Mar 2011 19:26:54 +0100 debdelta (0.40exp) experimental; urgency=low * Add l10n support, it_IT translation (viva DUCCIT!) * Type fix in man debdelta-upgrade, thanks to A. Costa (Closes: #593174). * Typo fix in error message: "...applying of delta for ... failed", thanks to A. Costa (Closes: #593187). 
* Use 'gpg2 --batch' internally for signing or verifying, (to avoid printing on the tty as much as possible) * Implement usage of gnupg-agent in the example scripts * Recommends gnupg2, gnupg-agent -- A Mennucc1 Sat, 25 Sep 2010 15:19:14 +0200 debdelta (0.39trl) unstable; urgency=high * The real thing release: upload the real 0.39, not a damaged version; * fixes: "debdelta _append_() wrong number of arguments", thanks to cybjit (Closes: #585079) for noting. -- A Mennucc1 Sun, 13 Jun 2010 16:58:15 +0200 debdelta (0.39) unstable; urgency=low * Reviewed messages printed (at different verbosity levels) . * Inside the delta, the internal script uses shell pipes, to reduce usage of temporary files . * Set Homepage: http://debdelta.debian.net * etc/sources.conf : use debdeltas.debian.net, thanks to Paul Wise for the suggestion, and thanks to the Bononia people (Closes: #517942). * xdelta3 (>= 0y) can use a FIFO for the input source (a.k.a the old version of the file). When using '--delta-algo xdelta3' the delta now, in its internal script, can process all old files in one pipe, w/o using many chunks stored in temporary files. Such deltas are marked by a new internal feature 'needs-xdelta3-fifo'. 
This is disabled by '--disable-feature xdelta3-fifo' * debdelta/debdeltas : store incomplete delta files as delta_tmp_ , are renamed when they are completed (and tested, and signed, if required) -- A Mennucc1 Fri, 03 Apr 2010 13:40:59 +0200 debdelta (0.38) unstable; urgency=low * debdelta-upgrade : - properly distinguish when prelink fails due to low disk space, or simply because the file did not need to be unprelinked, or else - small changes to progress reporting - -v -d , prints the URI of missing deltas * debdeltas : huge rewrite - big speedup in scanning repositories, less disk access - remove option --clean-alt - add option --old (note that, if no '--old' arg is specified, then no deltas will be generated; this is different from previous versions of debdeltas) - different meaning of --alt option - rewrote man page, with many examples * add new script /usr/share/debdelta/debmirror-deltas (a complex script that creates a repository of deltas, that can be used by debdelta-upgrade for upgrading packages in lenny,squeeze,sid,experimental ) -- A Mennucc1 Thu, 04 Feb 2010 22:27:24 +0100 debdelta (0.37) unstable; urgency=low * debdelta-upgrade * when < 50MB disk space, 'prelink -u' fails : report it -- A Mennucc1 Fri, 01 Jan 2010 21:15:08 +0100 debdelta (0.36) unstable; urgency=low * switch license of man pages to GNU Library General Public License (version 2) (same as the code), with permission of the author * patching : fix forensic report * debdelta-upgrade - better progress reporting - if '-v' , report info as apt-get does (Closes: #557780). 
-- A Mennucc1 Tue, 08 Dec 2009 10:17:11 +0100 debdelta (0.35) unstable; urgency=low * do not try to unprelink kernel modules (big speedup for kernel updates) * some adjustments to patching progress reports * Bug fix: HTTPResponse instance has no attribute 'message', thanks to Ritesh Raj Sarraf (Closes: #555038) * correct buglet when 'localepurge' is detected and a patch fails -- A Mennucc1 Sun, 08 Nov 2009 21:20:51 +0100 debdelta (0.34) unstable; urgency=low * FAQ, README , man pages , control : the file created by 'debdelta' is always called a 'delta' (before it was also called a 'debdelta' or a 'patch') * add references to CUPT in debian/control and man page (Closes: #551009). * debpatch.sh needs bash (Closes: #547744). * debpatch , debdelta-upgrade : report patching progress * debdelta-upgrade : - provide proxy support (see man page) (Closes: #551748). - download debs also from ftp debian archives (Closes: #472766) - better computation of download speeds (Closes: #463671). - properly print when a delta is too big and when it is not present, or other download error (but this does not work with http proxies) -- A Mennucc1 Sun, 25 Oct 2009 14:43:54 +0100 debdelta (0.33) unstable; urgency=low * debdelta-upgrade: add checks and bails out if localepurge has deleted files from the package * correct bug in parsing of /etc/debdelta/debdelta.conf : now sections match to the package name, not filename * debdelta: simplify deltas a bit * review the output of '-v' -- A Mennucc1 Fri, 18 Sep 2009 15:43:16 +0200 debdelta (0.32) unstable; urgency=low * debdeltas: add support for Packages.gz and Packages.bz2 . * debdeltas: add option --test (that does what '-d' was doing before) * debdelta,debdeltas: - add option --disable-feature , - and add document README.features explaining backward compatibility, - and correct some bugs in features handling. * Change Depends: lzma, xdelta, xdelta3, bsdiff to Recommends ('debpatch' and 'debdelta-upgrade' will ask if they are needed). 
* Fix exit status and document it in man page. * Fix -v and -d , and document '-d' better in man pages. * debpatch, debdelta-upgrade : if '-d' and a delta fails, create some files that may be used to address the bug. * Bug fix: "redownload package already in the cache", thanks to Sandro Tosi (Closes: #544784). -- A Mennucc1 Fri, 04 Sep 2009 23:29:06 +0200 debdelta (0.31) unstable; urgency=low * Add GPG signatures (YAY!!) * Update man pages to explain how to use signatures. * Add a README.upgrade document in /usr/share/doc/debdelta * Copy some useful stuff from contrib/ in /usr/share/debdelta : dpkg-sig : a version of dpkg-sig, patched to support debdelta signatures debmirror-delta-security : an example script to create deltas to upgrade from stable to stable-security debpatch.sh : a simple script that explains how debpatch works * Rearrange debian/rules * Correct some bugs in lzma support, but more testing is needed * debdelta : correct bug when the old deb contains data.tar.XX the new one contains data.tar.YY and XX != YY -- A Mennucc1 Fri, 21 Aug 2009 11:08:20 +0200 debdelta (0.30) unstable; urgency=low * Correct an indent in defining HOSTID * Bug fix: "[debdelta] debdelta-upgrade doesn't work with nobody user", thanks to Mert Dirik (Closes: #497967). * Bug fix: "os.popen2 deprecated in python2.6", thanks to Pär Andersson (Closes: #539911). -- A Mennucc1 Fri, 07 Aug 2009 10:10:04 +0200 debdelta (0.29) unstable; urgency=low * Really depend on 'binutils' (Closes: 511765) * Bug fix: "crash when /proc is not mounted", thanks to Paul Wise (Closes: #506383). * Use hashlib, not md5 module, in newer python, thanks to Lars Wirzenius (Closes: #537869). * Add support for lzma , thanks to Pär Andersson (Closes: #540005), (but it was not tested). -- A Mennucc1 Wed, 05 Aug 2009 14:09:02 +0200 debdelta (0.28) unstable; urgency=low * Depend on 'binutils' , 'bzip2' (Closes: 511765, 521670) * Support python-apt 0.7.10. 
Thanks a lot to Julian Andres Klode * debdelta-upgrade: select packages to upgrade, thanks to Nelson A. de Oliveira (Closes: #504423). -- A Mennucc1 Wed, 08 Apr 2009 13:47:44 +0200 debdelta (0.27) unstable; urgency=low * debpatch now can apply patches using data from installed version of a package also in hosts where prelink is used; and consequently debdelta-upgrade can upgrade such hosts. Thanks roman@khimov.ru. -- A Mennucc1 Mon, 15 Sep 2008 12:40:18 +0200 debdelta (0.26) unstable; urgency=low * rewrite internal DebDeltaError class to avoid problems with python 2.5; correct typo. -- A Mennucc1 Sat, 03 May 2008 19:21:31 +0200 debdelta (0.25) unstable; urgency=low * debdelta-upgrage: add an option --deb-policy to fine-tune download of .debs, thanks to Paul Wise (Closes: #465550). -- A Mennucc1 Sun, 16 Mar 2008 12:14:42 +0100 debdelta (0.24) unstable; urgency=low * some packages (notably, linux kernels) modify files that they install , and then patching from installed debs always fails; to avoid, I added /etc/debdelta/debdelta.conf , ~/.debdelta/debdelta.conf to configure which original files to skip/ship in debian packages * add support for bzip2 compression on data.tar -- A Mennucc1 Sun, 09 Mar 2008 14:10:53 +0100 debdelta (0.23) unstable; urgency=low * If "python-apt" is missing, print a error message and exit; "python-apt" is recommended; thanks to David (Closes: #448688). * debpatch understands the keywords 'needs-xdelta' and 'needs-bsdiff' * debdeltas: 'Package' files may be used as arguments * debdeltas : can have multiple --alt options * option --delta-algo to try different binary encoders; thanks to "Jiří Paleček" (Closes: #435159). * There is now a repository of deltas for stable security (again hosted at bononia, thanks a lot zack). 
-- A Mennucc1 Mon, 17 Dec 2007 15:26:04 +0100 debdelta (0.22) unstable; urgency=medium * depends on xdelta3 , and correct path in python * document --delta-algo -- A Mennucc1 Tue, 26 Aug 2007 18:09:29 +0200 debdelta (0.21) unstable; urgency=medium * save header of minigzip output in patch script, since it changes in newer versions of zlib -- A Mennucc1 Tue, 17 Jul 2007 10:09:29 +0200 debdelta (0.20) unstable; urgency=low * added man pages. thanks Pierre-yves Jégou (Closes: 376598) * fix bug in name/len file matching when building debdeltas (debdelta would die of "division by zero" sometimes) -- A Mennucc1 Sat, 14 Apr 2007 09:26:07 +0200 debdelta (0.19) unstable; urgency=low * Bug fix: append_info(delta,info,T) is wrong thanks a lot to David (Closes: #386252). -- A Mennucc1 Thu, 7 Sep 2006 20:49:22 +0200 debdelta (0.18) UNRELEASED; urgency=low * debdelta-upgrade : retries on httplib errors -- A Mennucc1 Sat, 29 Jul 2006 15:18:24 +0200 debdelta (0.17) unstable; urgency=medium * debpatch : since 'echo -n -e' doesnt works with all shells , patch script will test for it * debdelta : use bash to run patch script (until #379227 is fixed ) -- A Mennucc1 Wed, 22 Jul 2006 22:15:51 +0200 debdelta (0.16) unstable; urgency=low * debdelta_upgrade - is faster (particularly on small debs - by avoiding calls to dpkg) - avoids downloading deltas if not enough disk space for applying them - identifies itself as User-Agent = 'Debian debdelta-upgrade' * md5 generation and verification may be disabled by option --no-md5 * now temporary directories are of the form ${TMPDIR}/debdeltaXXXXXX * better checking for exceptions -- A Mennucc1 Wed, 12 Jul 2006 22:15:51 +0200 debdelta (0.15) unstable; urgency=low * debdelta : - faster on .debs with many many files ; - better behaviour when 'out of disk space' errors occour * debdeltas: - added '--alt DIR' option, to say where older versions of .debs are - added --clean-alt and --clean-deltas options * debdelta-upgrade: - if a delta fails, download 
the .deb - sources are in /etc/debdelta/sources.conf or ~/.debdelta/sources.conf * corrected many bugs -- A Mennucc1 Wed, 28 Jun 2006 16:23:28 +0200 debdelta (0.14) unstable; urgency=low * debdeltas: can --clean unusable debdeltas * debdelta: corrected bug (cannot gunzip a symlink) -- A Mennucc1 Tue, 13 Jun 2006 12:17:31 +0200 debdelta (0.13) unstable; urgency=low * debdelta_upgrade: thanks to Michael Vogt, now uses APT caches at best * debdelta & debpatch: many improvements - divide data.tar in small chunks - always use bsdiff (xdelta is buggy) - gunzip .gz files in data.tar before delta, and gzip after (the delta of debhelper 5.0.35 -> 5.0.36 was 80% of original .deb before, now it is to 10%) -- A Mennucc1 Mon, 12 Jun 2006 20:03:16 +0200 debdelta (0.12) unstable; urgency=low * debdeltas: deltas failed due to not enough disk space will be retried * debdelta: better use of MD5; and a filename similarity matcher, when in computing difference of data.tar.gz (unfortunately, slower) also, solves a bug + debdelta_upgrade: properly quote URLs (before, it seemed as if a lot of deltas were not present in the archives!); better HTTP handling -- A Mennucc1 Fri, 9 Jun 2006 18:45:17 +0200 debdelta (0.11) unstable; urgency=low * debdelta: (by faking 2nd part of tar headers), some improvement for .deb with many small files * debdelta-upgrade : while the thread is patching, also download some .debs for which deltas are not available. -- A Mennucc1 Wed, 7 Jun 2006 16:07:36 +0200 debdelta (0.10) unstable; urgency=low * safer code * debdelta: produce shorter patch.sh, and uses bzip2 -- A Mennucc1 Thu, 1 Jun 2006 23:10:44 +0200 debdelta (0.9) unstable; urgency=low * debdeltas: '-n N' option, to decide how many deltas to compute for each package ; '--avoid file' option, to avoid packages from a dist ('file' is a Python database, that can be created with contrib/parse_dist ) * debdelta : corrected bug, now really skip conf files ! 
* debdeltas : corrected bug, was not exiting from forks ! -- A Mennucc1 Wed, 31 May 2006 14:13:39 +0200 debdelta (0.8) unstable; urgency=low * 'debdelta' can use MD5 to exploit identical files that were renamed. This can express the difference between tetex-doc 3.0-17 and 3.0-18 into 260kB , even though all the directory tree was moved around ! -- A Mennucc1 Tue, 30 May 2006 18:24:17 +0200 debdelta (0.7) unstable; urgency=low * 'debdelta-upgrade' uses a thread to patch and download at the same time -- A Mennucc1 Tue, 30 May 2006 12:16:44 +0200 debdelta (0.6) unstable; urgency=low * new program 'debdelta-upgrade' to download and apply deltas (from my mirror) -- A Mennucc1 Mon, 29 May 2006 16:16:40 +0200 debdelta (0.5) unstable; urgency=low * new program 'debdeltas' to scan archive of debs and create an archive of deltas * 'debpatch' works on installed version of old deb, even in case of dpkg diversions -- A Mennucc1 Sat, 27 May 2006 12:00:13 +0200 debdelta (0.4) unstable; urgency=low * 'debpatch' can use a delta to create the new .deb, even if the old debian package file is not available, provided that it is installed in the host * code cleanup -- A Mennucc1 Tue, 23 May 2006 15:59:08 +0200 debdelta (0.3) experimental; urgency=low * use bsdiff when memory does not exceed 50Mb, and free disk space is enough * ' debpatch --info patch' to just know info on a patch -- A Mennucc1 Mon, 22 May 2006 10:34:06 +0200 debdelta (0.2) experimental; urgency=low * Added getopt support. Reorganized code. * Be careful of cwd when using os.path.abspath. * Add parameters to patch.sh , in particular, support for '#unpack-old' keyword * Graduated verbosity. 
-- A Mennucc1 Sat, 20 May 2006 14:06:23 +0200 debdelta (0.1) experimental; urgency=low * first alpha * it works on two debs with gzip compression -- A Mennucc1 Fri, 19 May 2006 19:19:54 +0200 debdelta/debian/postrm0000644000000000000000000000076112436652141012211 0ustar #!/bin/sh set -e GPG_HOME=/etc/debdelta/gnupg check1it () { ( cd ${GPG_HOME} test -f sha1_hashes.txt && sha1sum -c --quiet sha1_hashes.txt ) } if [ "$1" = purge ] ; then if [ -r /var/lib/debdelta ] ; then rm -r /var/lib/debdelta fi if check1it ; then ( cd ${GPG_HOME} rm -f pubring.gpg secring.gpg trustdb.gpg if test -f pubring.gpg~ ; then rm -f pubring.gpg~ fi rm -f sha1_hashes.txt ) rmdir ${GPG_HOME} || true fi fi debdelta/debian/copyright0000644000000000000000000000503712436652141012676 0ustar This package was put together by Andrea Mennucci. debdelta Program Copyright (C) 2006-2011 Andrea Mennucci. This program is free software; you can redistribute it and/or modify it under the terms of the GNU Library General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. This library is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Library General Public License for more details. You should have received a copy of the GNU Library General Public License along with this library; if not, write to the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston MA 02110-1301 USA minigzip Copyright (C) 1995-1998 Jean-loup Gailly and Mark Adler This software is provided 'as-is', without any express or implied warranty. In no event will the authors be held liable for any damages arising from the use of this software. 
Permission is granted to anyone to use this software for any purpose, including commercial applications, and to alter it and redistribute it freely, subject to the following restrictions: 1. The origin of this software must not be misrepresented; you must not claim that you wrote the original software. If you use this software in a product, an acknowledgment in the product documentation would be appreciated but is not required. 2. Altered source versions must be plainly marked as such, and must not be misrepresented as being the original software. 3. This notice may not be removed or altered from any source distribution. contrib/debmarshal_list_useless_debs Downloaded from http://debmarshal.googlecode.com/svn/trunk/repository2/pooldebclean/pooldebclean.pl and then slighlty patched. Copyright 2010 Google Inc. , 2011 A. Mennucci Author: Drake Diedrich , A. Mennucci Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
debdelta/example_d_r.txt0000644000000000000000000000056412436652141012541 0ustar #name version arch filename disttoken foobar 14 all pool/foobar_14.deb unstable foobar 15 all pool/foobar_15.deb unstable foobar 16 all pool/foobar_16.deb unstable foobar 16 all pool/foobar_16.deb testing foobar 17 all pool/foobar_17.deb unstable foobar 18 all pool/foobar_18.deb unstable foobar 18 all pool/foobar_18.deb testing foobar 19 all pool/foobar_19.deb unstable debdelta/debpatch0000777000000000000000000000000012436652141012675 2debdeltaustar debdelta/better_progress.diff0000644000000000000000000001206712436652141013565 0ustar --- debdelta 2010/04/06 09:01:44 1.271 +++ debdelta 2010/09/18 10:27:59 @@ -1174,6 +1174,8 @@ if TD[-1] != '/': TD = TD + '/' + import thread, threading + HAVE_PRELINK=os.path.exists('/usr/sbin/prelink') HAVE_LOCALEPURGE=os.path.exists('/etc/locale.nopurge') or os.path.exists('/usr/sbin/localepurge') @@ -1203,6 +1205,8 @@ os.unlink(temp_err_name) runtime['patchprogress']=1 + if do_progress: + sys.stderr.write('P %2d%% %s\r' % (1, newdebshortname)) is_signed=False for a in ar_list_delta: @@ -1219,6 +1223,8 @@ print "WARNING, delta is not signed: "+delta runtime['patchprogress']=2 + if do_progress: + sys.stderr.write('P %2d%% %s\r' % (2, newdebshortname)) _info_patch_unzip_(TD) @@ -1234,6 +1240,8 @@ params=info_2_db(info) runtime['patchprogress']=3 + if do_progress: + sys.stderr.write('P %2d%% %s\r' % (3, newdebshortname)) #this is not needed in preparing the patch, but may help in forensic conf_files=[] @@ -1261,6 +1269,8 @@ raise DebDeltaError('Old deb size is '+str(olddebsize)+' instead of '+params['OLD/Size']) runtime['patchprogress']=4 + if do_progress: + sys.stderr.write('P %2d%% %s\r' % (4, newdebshortname)) if DEBUG > 1 : #this is currently disabled, since 'dpkg -s' is vey slow (~ 1.6 sec) @@ -1287,6 +1297,8 @@ '\nin old/installed deb, '+a+' = ' +dpkg_params[a]) runtime['patchprogress']=5 + if do_progress: + sys.stderr.write('P %2d%% %s\r' % (5, 
newdebshortname)) ### some auxiliary routines, separated to make code more readable @@ -1428,6 +1440,8 @@ ###see into parameters: the patch may need extra info and data runtime['patchprogress']=6 + if do_progress: + sys.stderr.write('P %2d%% %s\r' % (6, newdebshortname)) prelink_time=None @@ -1490,33 +1504,58 @@ raise DebDeltaError('localepurge removed '+str(len(localepurged))+' files.') runtime['patchprogress']=12 + if do_progress: + sys.stderr.write('P %2d%% %s\r' % (12, newdebshortname)) a='' if DEBUG: a='-v' script_time = - time.time() - + + newfile=os.path.join(TD,'NEW.file') + newdebsize=None + if 'NEW/Size' in params: + newdebsize = int(params['NEW/Size']) + temp_err_name_fd, temp_err_name = tempfile.mkstemp(prefix='debdeltaE') temp_name_fd, temp_name = tempfile.mkstemp(prefix='debdeltaO') F=subprocess.Popen([SHELL,'-v','-e','PATCH/patch.sh'], cwd=TD, stderr=subprocess.PIPE, stdout=temp_name_fd) progresschar=0.0 progresslen=float(os.path.getsize(os.path.join(TD,'PATCH/patch.sh'))) + + def update_progress(): + count=1 + while sys and time and runtime and 'update_progress_exit' not in runtime: + if os.path.exists(newfile) and newdebsize : + progress=(int(12.0 + 42.0 * progresschar / progresslen + \ + 42.0 * os.path.getsize(newfile) / newdebsize )) + else: + progress=(int(12.0 + 42.0 * progresschar / progresslen )) + runtime['patchprogress']=count ## HACK progress + if sys and do_progress: + sys.stderr.write('P %d %2d%% %s\r' % (count, progress, newdebshortname)) + time.sleep(0.05) + count += 1 + + progress_thread=threading.Thread(target=update_progress) + progress_thread.start() + for j in F.stderr: os.write(temp_err_name_fd, j) progresschar+=len(j) - progress=(int(12.0 + 84.0 * progresschar / progresslen)) - runtime['patchprogress']=progress - if do_progress: - sys.stderr.write('P %2d%% %s\r' % (progress, newdebshortname)) + F.wait() - if do_progress and terminalcolumns: #clean up - sys.stderr.write(' ' * (terminalcolumns-2) +'\r') ret=F.returncode 
os.close(temp_err_name_fd) os.close(temp_name_fd) + runtime['update_progress_exit']=True + script_time += time.time() + runtime['patchprogress']=97 + if do_progress: + sys.stderr.write('P %2d%% %s\r' % (97, newdebshortname)) #helper for debugging def tempos(): @@ -1566,11 +1605,9 @@ raise DebDeltaError('error in patch.sh.') #then we check for the conformance - if 'NEW/Size' in params: - newdebsize = os.stat(TD+'NEW.file')[ST_SIZE] - if newdebsize != int(params['NEW/Size']): - fore() - raise DebDeltaError('new deb size is '+str(newdebsize)+' instead of '+params['NEW/Size']) + if newdebsize != None and newdebsize != os.path.getsize(newfile): + fore() + raise DebDeltaError('new deb size is '+str(os.path.getsize(newfile))+' instead of '+str(newdebsize)) if DO_MD5: if 'NEW/MD5sum' in params: @@ -1586,14 +1623,19 @@ os.unlink(temp_name) os.unlink(temp_err_name) - runtime['patchprogress']=99 + runtime['patchprogress']=98 + if do_progress: + sys.stderr.write('P %2d%% %s\r' % (98, newdebshortname)) if newdeb: - shutil.move(TD+'NEW.file',newdeb) - + shutil.move(newfile,newdeb) + end_sec = time.time() elaps=(end_sec - start_sec) + if do_progress and terminalcolumns: #clean up + sys.stderr.write(' ' * (terminalcolumns-2) +'\r') + if VERBOSE : if newdeb: debsize = os.stat(newdeb)[ST_SIZE] debdelta/etc/0000755000000000000000000000000012436652141010267 5ustar debdelta/etc/sources.conf0000644000000000000000000000212212436652141012616 0ustar ##This file contains configurations for debdelta-upgrade ; ##this file may be overridden by a file ~/.debdelta/sources.conf ##this file is divided in sections, that start by a line such as #[section title] ##Each section of this file refers to a .deb archive, ##so each section must contain a line #delta_uri="the URI of the archive of deltas for those .debs" ## To identify a .deb archive, you may use those options ## Origin , Label , Site, Archive (any combination of those); ## Origin , Label are specified in the Release file for that archive, 
## (note that Archive is called Suite in in the Release file); ## whereas site is the host. Some examples #Origin=Debian #Label=Debian-Security #Archive=unstable #Site=ftp.debian.org [main debian archive] Origin=Debian Label=Debian delta_uri=http://debdeltas.debian.net/debian-deltas [backports debian archive] Origin=Debian Backports Label=Debian Backports delta_uri=http://debdeltas.debian.net/debian-deltas [stable security debian archive] Origin=Debian Label=Debian-Security delta_uri=http://debdeltas.debian.net/debian-security-deltas debdelta/etc/debdelta.conf0000644000000000000000000000070612436652141012705 0ustar ## This conf file contains per-package options that are ## automatically applied by 'debdelta' or 'debdeltas'. [linux-image-*] skip=lib/modules/*/modules* [exim4-base] skip=var/spool/exim4/gnutls-params [openoffice.org-common] skip=usr/lib/openoffice/share/config/javasettingsunopkginstall.xml [openoffice.org-core] skip=var/lib/openoffice/basis3.2/program/services.rdb [e2fsprogs] #it transitioned in util-linux skip=usr/share/man/man8/fsck.8.gz debdelta/AUTHORS0000644000000000000000000000041612436652141010565 0ustar debdelta is Copyright (C) 2006-09 Andrea Mennucci minigzip.c -- simulate gzip using the zlib compression library Copyright (C) 1995-2002 Jean-loup Gailly For conditions of distribution and use, see copyright notice in minigzip.c debdelta/debdelta0000755000000000000000000053455512436652141011227 0ustar #!/usr/bin/python # Copyright (C) 2006-09 Andrea Mennucci. # License: GNU Library General Public License, version 2 or later EMAIL="mennucc1@debian.org" #### messages and translations # Messages are usually printed on stdout ; fatal errors are printed on stderr ; # progress bars are on stderr ; some errors are though printed on stdout, otherwise they are # of sync wrt their context. # Messages printed at verbosity 0 or 1 are translated, higher verbosities are not # Errors are sometimes translated, sometimes not... 
# obscure error messages that would be printed only in very rare cases are not translated # e.g.: a malformed http header, a gnupg unexptected error, damaged input files... # more common error messages are translated, # e.g.: out of disk space while using debdelta-upgrade , file does not exist... try: import gettext gettext.bindtextdomain('debdelta','/usr/share/locale') gettext.textdomain('debdelta') _ = gettext.gettext except Exception,a: sys.stderr.write('Could not initialize "gettext", translations will be unavailable\n'+str(a)) def _(x): return x doc={} doc['delta']=_("""\ Usage: debdelta [ option... ] fromfile tofile delta Computes the difference of two deb files, from fromfile to tofile, and writes it to delta Options: --signing-key KEY gnupg key used to sign the delta --no-md5 do not include MD5 info in delta --needsold create a delta that can only be used if the old deb is available -M Mb maximum memory to use (for 'bsdiff' or 'xdelta') --delta-algo ALGO use a specific backend for computing binary diffs """) doc['deltas']=_("""\ Usage: debdeltas [ option... ] [deb files and dirs, or 'Packages' files] Computes all missing deltas for deb files. 
It orders by version number and produce deltas to the newest version Options: --signing-key KEY key used to sign the deltas (using GnuPG) --dir DIR force saving of deltas in this DIR (otherwise they go in the dir of the newer deb_file) --old ARGS 'Packages' files containing list of old versions of debs --alt ARGS for any cmdline argument, search for debs also in this place -n N how many deltas to produce for each deb (default unlimited) --no-md5 do not include MD5 info in delta --needsold create a delta that can only be used if the old .deb is available --delta-algo ALGO use a specific backend for computing binary diffs; possible values are: xdelta xdelta-bzip xdelta3 bsdiff -M Mb maximum memory to use (for 'bsdiff' or 'xdelta') --clean-deltas delete deltas if newer deb is not in archive --cache cache parsed version of Packages.bz2 as Packages.debdelta_cache """) ## implement : --search search in the directory of the above debs for older versions doc['patch']=_("""\ Usage: debpatch [ option... ] delta fromfile tofile Applies delta to fromfile and produces a reconstructed version of tofile. (When using 'debpatch' and the old .deb is not available, use '/' for the fromfile.) Usage: debpatch --info delta Write info on delta. Options: --no-md5 do not verify MD5 (if found in info in delta) -A accept unsigned deltas --format FORMAT format of created deb """) doc['delta-upgrade']=_("""\ Usage: debdelta-upgrade [package names] Downloads all deltas and apply them to create the debs that are needed by 'apt-get upgrade'. Options: --dir DIR directory where to save results --deb-policy POLICY policy to decide which debs to download, -A accept unsigned deltas --format FORMAT format of created debs """) doc['patch-url']=_("""\ Usage: debpatch-url [package names] Show URL wherefrom to downloads all deltas that may be used to upgrade the given package names """) doc_common=_("""\ -v verbose (can be added multiple times) --no-act do not do that (whatever it is!) 
-d add extra debugging checks -k keep temporary files (use for debugging) --gpg-home HOME specify a different home for GPG See man page for more options and details. """) minigzip='/usr/lib/debdelta/minigzip' minibzip2='/usr/lib/debdelta/minibzip2' #################################################################### import sys , os , tempfile , string ,getopt , tarfile , shutil , time, traceback, ConfigParser, subprocess, time, tarfile, stat, hashlib, random, gzip try: import debian.deb822 debian_deb822 = debian.deb822 except ImportError: debian_deb822 = None import cPickle as pickle from stat import ST_SIZE, ST_MTIME, ST_MODE, S_IMODE, S_IRUSR, S_IWUSR, S_IXUSR from os.path import abspath, expanduser from copy import copy from types import FunctionType string_types = (str, unicode) # change this for python3 def get_termsize(): import termios, fcntl, struct s = struct.pack("HHHH", 0, 0, 0, 0) fd_stdout = sys.stdout.fileno() x = fcntl.ioctl(fd_stdout, termios.TIOCGWINSZ, s) return struct.unpack("HHHH", x)[:2] try: (terminalrows , terminalcolumns) = get_termsize() except: (terminalrows , terminalcolumns) = (None, None) #(24, 80) ################################################# main program, read options #target of: maximum memory that bsdiff will use MAXMEMORY = 1024 * 1024 * 50 #this is +-10% , depending on the package size MAX_DELTA_PERCENT = 70 #min size of .deb that debdelta will consider #very small packages cannot be effectively delta-ed MIN_DEB_SIZE = 10 * 1024 N_DELTAS=None USE_DELTA_ALGO = 'bsdiff' DEBUG = 0 VERBOSE = 0 KEEP = False INFO = False NEEDSOLD= False DIR = None ALT = [] OLD = [] ACT = True DO_MD5 = True DEB_POLICY = ['b','s','e'] DO_PROGRESS = terminalcolumns != None #where/how debpatch/debdelta-upgrade will send forensic data, when patching fails #possible values: # False : do not send them # True : compute forensic but not send them, just list them # mail : automatically send by email to default address # user@domain : automatically send by 
email to address # mailto:user@domain : as above # mutt:user@domain : as above, but use 'mutt', so the user can customize it # http://domain/cgi : send them automatically thru a CGI script #Warning: the above is mostly TODO FORENSIC='http' #directory tree where forensic info are stored by 'debdeltas' FORENSICDIR=None DEB_FORMAT='deb' DEB_FORMAT_LIST=('deb','unzipped','preunpacked') #not yet implemented on patching side : (,'piped') #for debdeltas: test patches internally DO_TEST = False DO_GPG = True #this is changed a few lines below GPG_SIGNING_KEY = None if os.getuid() == 0: GPG_HOME="/etc/debdelta/gnupg" else: GPG_HOME=None GPG_MASTER_PUB_KEYRING="/usr/share/keyrings/debian-debdelta-archive-keyring.gpg" GPG_CMD='gpg' if os.path.exists('/usr/bin/gpg2'): GPG_CMD='/usr/bin/gpg2' CLEAN_DELTAS = False CLEAN_DELTAS_MTIME = 2 # two days grace period CLEAN_ALT = False DO_PREDICTOR = False DO_CACHE = False #cache parsed version of Packages.bz2 as Packages.debdelta_cache #see README.features DISABLEABLE_FEATURES=['xz', 'lzma', 'xdelta3-fifo'] DISABLED_FEATURES=[] HTTP_USER_AGENT={'User-Agent': ('Debian debdelta-upgrade' ) } DPKG_MULTIARCH=( 0 == os.system('dpkg --assert-multi-arch 2> /dev/null') ) if __name__ != "__main__": action = None elif os.path.dirname(sys.argv[0]) == '/usr/lib/apt/methods' : action = None else: action=(os.path.basename(sys.argv[0]))[3:] actions = ('delta','patch','deltas','delta-upgrade', 'patch-url') if action not in actions: print 'wrong filename: should be "deb" + '+repr(actions) raise SystemExit(4) __doc__ = doc[action] + doc_common #GPG signatures are required for debdelta-upgrade and debpatch DO_GPG = action in ( "delta-upgrade", "patch") try: ( opts, argv ) = getopt.getopt(sys.argv[1:], 'vkhdM:n:A' , ('help','info','needsold','dir=','no-act','alt=','old=','delta-algo=', 'max-percent=','deb-policy=','clean-deltas','clean-alt','no-md5','debug','forensicdir=','forensic=', 'signing-key=', "accept-unsigned", "gpg-home=", "disable-feature=", 
"test", "format=","cache") ) except getopt.GetoptError,a: sys.stderr.write(sys.argv[0] +': '+ str(a)+'\n') raise SystemExit(3) for o , v in opts : if o == '-v' : VERBOSE += 1 elif o == '-d' or o == '--debug' : DEBUG += 1 elif o == '-k' : KEEP = True elif o == '--no-act': ACT=False elif o == '--no-md5': DO_MD5=False elif o == '--clean-deltas' : CLEAN_DELTAS = True elif o == '--clean-alt' : CLEAN_ALT = True print 'Warning, currently --clean-alt does nothing.' elif o == '--needsold' : NEEDSOLD = True elif o == '--delta-algo': USE_DELTA_ALGO=v elif o == '--max-percent': MAX_DELTA_PERCENT=int(v) elif o == '--deb-policy' : DEB_POLICY = [j[0] for j in v.split(',') if j] elif o == '-M' : if int(v) <= 1: print 'Error: "-M ',int(v),'" is too small.' raise SystemExit(3) if int(v) <= 12: print 'Warning: "-M ',int(v),'" is quite small.' MAXMEMORY = 1024 * 1024 * int(v) elif o == '-n' : N_DELTAS = int(v) if N_DELTAS < 0: print 'Error: -n ',v,' is negative.' raise SystemExit(3) elif o == '--test' and action == 'deltas' : DO_TEST = True elif o == '--info' and action == 'patch' : INFO = True elif o == '--dir' : DIR = abspath(expanduser(v)) if v[-2:] == '//': DIR += '//' if not os.path.isdir(DIR): sys.stderr.write( _("Error: argument of --dir is not a directory:") +' '+ DIR +'\n') raise SystemExit(3) elif o == '--forensicdir' : FORENSICDIR = abspath(expanduser(v)) if v[-2:] == '//': FORENSICDIR += '//' if not os.path.isdir(FORENSICDIR): sys.stderr.write( _("Error: argument of --forensicdir is not a directory:") +' '+ FORENSICDIR +'\n') raise SystemExit(3) elif o == '--forensic' : FORENSIC = v if FORENSIC[:4] == 'http': try: import poster except: print 'To use the http forensic, you must install the package "python-poster".' raise SystemExit(3) if FORENSIC[:4] in ('mutt','mail') and not os.path.exists('/usr/bin/mutt'): print 'To use this forensic, you must install the package "mutt".' 
raise SystemExit(3) elif o == '--alt' : if not (os.path.isfile(v) or os.path.isdir(v)) : sys.stderr.write(_('Error: argument of --alt is not a directory or a regular file:')+' '+v +'\n') raise SystemExit(3) ALT.append(v) elif o == '--old' and action == 'deltas' : if not (os.path.isfile(v) or os.path.isdir(v)) : sys.stderr.write(_("Error: argument of --old is not a directory or a regular file:")+' '+v +'\n') raise SystemExit(3) OLD.append(v) elif o == '--help' or o == '-h': print __doc__ raise SystemExit(0) elif (o == '--disable-feature') and action in ("delta", "deltas"): DISABLED_FEATURES += v.split(',') elif (o == '--signing-key') and action in ("delta", "deltas"): GPG_SIGNING_KEY=v DO_GPG=True elif (o == '--accept-unsigned' or o == '-A') and action in ("delta-upgrade", "patch"): DO_GPG=False elif (o == '--gpg-home'): GPG_HOME=abspath(expanduser(v)) if not os.path.isdir(GPG_HOME): print _("Error: --gpg-home `%s' does not exist.") % GPG_HOME raise SystemExit(3) elif o == '--format' : # maybe, and action in ("delta-upgrade", "patch"): if v not in DEB_FORMAT_LIST: sys.stderr.write(_("Error: output format `%s' is unknown.") % v + '\n') raise SystemExit(3) DEB_FORMAT=v elif o == '--cache': DO_CACHE=True else: sys.stderr.write(_("Error: option `%s' is unknown, try --help") % o + '\n') raise SystemExit(3) for i in DISABLED_FEATURES: if i not in DISABLEABLE_FEATURES: sys.stderr.write(_("Error: feature `%s' cannot be disabled.") % i + '\n') raise SystemExit(3) try: BOGOMIPS=float(subprocess.Popen('grep bogomips /proc/cpuinfo', shell=True, stdout=subprocess.PIPE). 
stdout.read().split(':')[-1]) except: if VERBOSE: print ' Warning, /proc not mounted, using bogus BOGOMIPS' BOGOMIPS=3000.0 SP=subprocess.Popen(['hostname','-f'], shell=False, stdout=subprocess.PIPE) HOSTID=hashlib.md5( SP.stdout.read() ).hexdigest() SP.wait() del SP TMPDIR = ( os.getenv('TMPDIR') or '/tmp' ).rstrip('/') if KEEP: def unlink(a): if VERBOSE > 2: print ' -k: would unlink ',a def rmdir(a): if VERBOSE > 2: print ' -k: would rmdir ',a def rmtree(a): if VERBOSE > 2: print ' -k: would rm -r ',a else: def __wrap__(a,cmd): c=cmd.__name__+"("+a+")" if a[ : len(TMPDIR)+9 ] != TMPDIR+'/debdelta' : raise DebDeltaError,'Internal error! refuse to '+c try: cmd(a) except OSError,s: print ' Warning! when trying to ',repr(c),'got OSError',repr(str(s)) raise def unlink(a): return __wrap__(a,os.unlink) def rmdir(a): return __wrap__(a,os.rmdir) def rmtree(a): return __wrap__(a,shutil.rmtree) #################################################### various routines def my_popen_read(cmd): return subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stdin=open(os.devnull), close_fds=True).stdout def freespace(w): assert(os.path.exists(w)) try: a=os.statvfs(w) freespace= long(a[0]) * long(a[4]) except Exception, s: print 'Statvfs error:', str(s) freespace=None return freespace dpkg_keeps_controls = ( 'conffiles','config','list','md5sums','postinst', 'postrm','preinst','prerm','shlibs','templates') def parse_dist(f,d): a=f.readline() p={} while a: if a[:4] in ('Pack','Vers','Arch','Stat','Inst','File','Size','MD5s'): a=de_n(a) i=a.index(':') assert(a[i:i+2] == ': ') p[a[:i]] = a[i+2:] elif a == '\n': d[p['Package']] = p p={} a=f.readline() def scan_control(p,params=None,prefix=None,info=None): if prefix == None: prefix = '' else: prefix += '/' a=p.readline() while a: a=de_n(a) if a[:4] in ('Pack','Vers','Arch','Stat','Inst','File'): if info != None : info.append(prefix+a) if params != None: i=a.index(':') assert(a[i:i+2] == ': ') params[prefix+a[:i]] = a[i+2:] a=p.readline() 
def append_info(delta,info): "insert into the delta (that is an AR archive) the info file, as a first element, possibly removing a previous occurrence" #new style : special info file TD = abspath(tempfile.mkdtemp(prefix='debdelta',dir=TMPDIR)) infofile=open(TD+'/info','w') for i in info: infofile.write(i+'\n') infofile.close() if DO_GPG: r=_compute_hashes_(TD+"/info") else: r=None system(['ar','rSi','0',delta, 'info'], TD) rmtree(TD) return r def de_n(a): if a and a[-1] == '\n' : a = a[:-1] return a def de_bar(a): if a and a[:2] == './' : a=a[2:] elif a == '/.' : a='' elif a and a[0] == '/' : a=a[1:] return a def list_ar(f): assert(os.path.exists(f)) ar_list = [] p=my_popen_read('ar t '+f) while 1: a=p.readline() if not a : break a=de_n(a) ar_list.append(a) p.close() return ar_list def list_tar(f): assert(os.path.exists(f)) ar_list = [] p=my_popen_read('tar t '+f) while 1: a=p.readline() if not a : break a=de_n(a) ar_list.append(a) p.close() return ar_list class cache_sequence(object): cache_filename=None cache=None exists=None broken=None def __init__(self, filename): "manages a cache file that store a sequence of python object" self.cache_filename=os.path.splitext(filename)[0]+'.debdelta_cache' self.cache=None self.broken=None self.exists=os.path.isfile(self.cache_filename) and \ os.path.getmtime(filename) < os.path.getmtime(self.cache_filename) def __iter__(self): assert self.exists and not self.cache self.cache=gzip.GzipFile(self.cache_filename) return self def next(self): assert self.cache try: return pickle.load(self.cache) except EOFError: self.cache=None raise StopIteration except Exception, e: print 'Cache file is broken (%r), deleting %r' % (e, self.cache_filename) if ACT: os.unlink(self.cache_filename) self.cache=None self.broken=True # do not kill program raise StopIteration def __prepare_for_write__(self): if not self.cache: if DEBUG: print ' Creating cache file :', self.cache_filename self.cache=gzip.GzipFile(self.cache_filename,'w') def close(self): 
if self.cache: try: self.cache.close() except Exception,e: print 'Cannot close the cache file (%r)' % (self.cache_filename,) self.broken=True try: self.cache=None except: pass __del__=close def write(self,s): " write one object" assert not self.exists if self.broken: return self.__prepare_for_write__() try: self.cache.write(pickle.dumps(s)) except Exception,e: print 'Cannot write to cache file (%r), deleting %r' % (e, self.cache_filename) self.close() if ACT: os.unlink(self.cache_filename) self.broken=True class cache_same_dict(cache_sequence): "cache occurrences of a dict that uses always the same keys; omit the keys to optimize" def __init__(self, filename, keys): super(cache_same_dict, self).__init__(filename) self.keys=keys def write(self, s): n=[s[k] for k in self.keys] super(cache_same_dict, self).write(n) def next(self): n=super(cache_same_dict, self).next() return dict(map(lambda x,y: (x,y) , self.keys, n)) # dict comprehension may be used instead ##################################################################### ALLOWED = '<>()[]{}.,;:!_-+/ abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ' ECHO_TEST = r"""c='\0151\0141' E='echo -ne' if test c`$E 'i'"$c" `o = ciiao ; then : else E='echo -n' if test c`$E 'i'"$c" `o = ciiao ; then : else #echo WARNING : BUILTIN echo DOES NOT WORK OK E='/bin/echo -ne' test c`$E 'i'"$c" `o = ciiao fi fi """ def prepare_for_echo__(s): assert ( type (s) in string_types ) r='' shortquoted=False for a in s: if a in ALLOWED : r += a shortquoted = False elif a in '0123456789' : if shortquoted : a = "\\" + ('000' +oct(ord(a)))[-4:] shortquoted = False r += a else: a = "\\" + oct(ord(a)) r += a shortquoted = len(a) < 5 return r def apply_prepare_for_echo(shell,repres): a=ECHO_TEST + " $E '" + repres + "' \n exit " p = subprocess.Popen([shell], stdin=subprocess.PIPE, stdout=subprocess.PIPE, close_fds=True) (i, o) = (p.stdout, p.stdin) o.write(a) o.close() a=i.read() i.close() return a #ack! 
I wanted to use 'dash' as preferred shell, but bug 379227 stopped me
SHELL = '/bin/bash'

# self-test, run at import time: round-trip a binary string through the
# shell 'echo' machinery and abort if it does not survive intact
#check my code
s='\x00'+'1ciao88\n77\r566'+'\x00'+'99\n'
r=prepare_for_echo__(s)
a=apply_prepare_for_echo(SHELL,r)
if a != s :
    print 'string='+repr(s)
    print 'repres='+repr(r)
    print 'shell='+SHELL
    print 'output='+repr(a)
    print 'Errror in prepare_for_echo.'
    raise SystemExit(4)
del r,s,a

###

def prepare_for_echo(s):
    "quote the binary string `s` for the shell 'echo'; with DEBUG > 2, verify the round trip through the shell"
    r=prepare_for_echo__(s)
    if DEBUG > 2 :
        a=apply_prepare_for_echo(SHELL,r)
        if a != s:
            z = 'Error in prepare_for_echo()\n'
            z += 'string='+repr(s)+'\n'
            z += 'repres='+repr(r)+'\n'
            z += 'shell='+SHELL+'\n'
            z += 'output='+repr(a)+'\n'
            raise DebDeltaError(z, exitcode=4)
    return r

#####################################################################

from string import join

def version_mangle(v):
    "escape the epoch colon in a Debian version as '%3a', for use in file names"
    if ':' in v :
        return join(v.split(':'),'%3a')
    else: return v

def version_demangle(v):
    "inverse of version_mangle: turn '%3a' back into the epoch colon"
    if '%' in v :
        return join(v.split('%3a'),':')
    else: return v

def tempo():
    "create a private temporary directory with OLD, NEW and PATCH subdirectories; caller must remove it"
    TD = abspath(tempfile.mkdtemp(prefix='debdelta',dir=TMPDIR))
    for i in 'OLD','NEW','PATCH' :
        os.mkdir(TD+'/'+i)
    if VERBOSE > 2 or KEEP :
        print 'Temporary in '+TD
    return TD

##########

class DebDeltaError(Exception):
    #should derive from (Exception):http://docs.python.org/dev/whatsnew/pep-352.html
    # Subclasses that define an __init__ must call Exception.__init__
    # or define self.args. Otherwise, str() will fail.
def __init__(self,s,retriable=False,exitcode=None,logs=None): assert(type(s) in string_types) self.retriable = retriable if retriable: self.args=(s + ' (retriable) ',) else: self.args=(s + ' (non retriable) ',) if exitcode == None: if retriable: exitcode = 1 else: exitcode = 2 self.exitcode=exitcode self.logs=logs def die(s): #if s : sys.stderr.write(s+'\n') assert type(s) in string_types raise DebDeltaError(s) def system(a,TD,saveargs=None,ignore_output=False,return_output=False): "a must be a tuple, TD the temporary directory ; if return_output , it will return (stdout,stderr,exitcode) regardless" assert type(a) in (list, tuple) # mvo: compat with python2.5 where tuple does not have index a = list(a) if VERBOSE and TD[: (len(TMPDIR)+9) ] != TMPDIR+'/debdelta' : print ' Warning "system()" in ',TD,' for ',a (temp_fd, temp_name) = tempfile.mkstemp(prefix="debdelta_out_system") (temp_err_fd, temp_err_name) = tempfile.mkstemp(prefix="debdelta_err_system") if VERBOSE > 3 : print ' system(',a,')=', #special code for pipes http://docs.python.org/library/subprocess.html#replacing-shell-pipeline old_stdin=open(os.devnull) pros=[] while '|' in a: l=a.index('|') ; a1=a[:l] ; a=a[l+1:] p=subprocess.Popen(args=a1, stdin=old_stdin, stdout=subprocess.PIPE, stderr=temp_err_fd, cwd=TD, close_fds=True) pros.append( p ) old_stdin=p.stdout final_pro= subprocess.Popen(args=a, stdin=old_stdin, stdout=temp_fd, stderr=temp_err_fd, cwd=TD, close_fds=True) for p in pros: p.stdout.close() # Allow p1 to receive a SIGPIPE if p2 exits. 
pros.append(final_pro) for p in pros: p.wait() ret = max([ p.returncode for p in pros]) os.close(temp_fd) os.close(temp_err_fd) if VERBOSE > 3 : print ret if ignore_output==False and (os.stat(temp_name)[ST_SIZE] > 0 or os.stat(temp_err_name)[ST_SIZE] > 0 ): print ' command "%s" returned %d and produced output as follows' % (a,ret) for i in open(temp_name): print 'stdout: ',repr(i) for i in open(temp_err_name): print 'stderr: ',repr(i) if return_output: return temp_name, temp_err_name, ret os.unlink(temp_err_name) os.unlink(temp_name) if ret == 0: return elif ret != 1 or a[0] != 'xdelta' : s='Error , non zero return status '+str(ret)+' for command "'+repr(a)+'"' try: if DEBUG and saveargs: T=abspath(tempfile.mkdtemp(prefix='debdelta',dir=TMPDIR)) open(T+'/command','w').write(repr(a)) for l in saveargs: if l[0] != '/': l = TD+'/'+l if os.path.exists(l): shutil.copy2(l,T) s=s+'\n saved argument '+l+' in '+T else: s=s+'\n did not find argument '+l except OSError,o: s=s+'\n (there was an additional OSError "'+str(o)+'" when trying to save arguments)' die(s) def check_deb(f): if not os.path.exists(f) : die(_("Error: the file `%s' does not exist.") % f) if not os.path.isfile(f) : die(_("Error: `%s' is not a regular file.") % f) p=open(f) if p.read(21) != "!\ndebian-binary" : die(_("Error: `%s' does not seem to be a Debian package.") % f) p.close() def check_is_delta(f): if not os.path.exists(f) : die(_("Error: the file `%s' does not exist.") % f) if not os.path.isfile(f) : die(_("Error: `%s' is not a regular file.") % f) p=open(f) if p.read(8) != "!\n" : die(_("Error: `%s' does not seem to be a Debian delta.") % f) p.close() def puke(s,e=None): " write informations on stderr, if DEBUG also traceback" (typ, value, trace)=sys.exc_info() if e == None or len(str(e)) < 2: sys.stderr.write(str(s)+' : '+str(e)+' '+str(typ)+' '+str(value)+'\n') else: sys.stderr.write(str(s)+' : '+str(e)+'\n') if DEBUG and trace and traceback.print_tb(trace): sys.stderr.write( 
traceback.print_tb(trace)+'\n') ################################################################### GPG def gpg_base_commandline(): if GPG_HOME: GPG_BASE_CMD_LINE=[GPG_CMD,"--homedir",GPG_HOME] else: GPG_BASE_CMD_LINE=[GPG_CMD,"--keyring",GPG_MASTER_PUB_KEYRING] if VERBOSE < 1 : GPG_BASE_CMD_LINE+=['--quiet'] return GPG_BASE_CMD_LINE def gpg_sign_command(): return gpg_base_commandline()+["--batch","--armor","--clearsign","--default-key",GPG_SIGNING_KEY,"--sign"] def compute_md5_len(o): "hash the file using MD5. 'o' may be a string (in which case the file is opened) or a file type; returns MD5 and length" if type(o) in string_types: o = open(o) m=hashlib.md5() a=o.read(1024) l=0 while a: l+=len(a) m.update(a) a=o.read(1024) return m.hexdigest(), l def compute_md5(o): "hash the file using MD5. 'o' may be a string (in which case the file is opened) or a file type; returns MD5 (as a string of hexes)" return compute_md5_len(o)[0] def _compute_hashes_(na): "hash the file" o = open(na) m=hashlib.md5() s=hashlib.sha1() a=o.read(1024) while a: m.update(a) s.update(a) a=o.read(1024) r = ( m.hexdigest(), s.hexdigest(), os.stat(na)[ST_SIZE]) return r def _compute_hashes_db_(li,DIR): db={} for na in li: db[na] = _compute_hashes_(DIR+'/'+na) return db def verify_signature(signature, DIR): a="-----BEGIN PGP SIGNED MESSAGE-----\n" if open(signature).read(len(a)) != a: return ('BAD_FORMAT',signature) role=os.path.basename(signature) assert role[:4] == "_gpg" role=role[4:] (temp_fd, temp_name) = tempfile.mkstemp(prefix="debdelta_gpg_verified") #(read_end, write_end) = os.pipe() p=subprocess.Popen(gpg_base_commandline() + ['--batch','--status-fd',"2",'--output',"-",signature], stdout=subprocess.PIPE, stderr=temp_fd, stdin=open(os.devnull), close_fds=True) r=_verify_signature_no_gpg(p.stdout, DIR, role) p.wait() os.close(temp_fd) if VERBOSE > 1 or p.returncode: for j in open(temp_name): print ' GPG> ',j, os.unlink(temp_name) if p.returncode: return ('GPG_VERIFY_FAILED',signature) 
    return r

def _verify_signature_no_gpg(signature, DIR, role):
    """Check the (already GPG-verified) hashes file `signature` against the
    actual contents of directory DIR, for the given signature role.
    Returns True on success, else a tuple (REASON, detail)."""
    #list stuff, skipping signatures
    dir_list = [a for a in os.listdir(DIR) if a[:4] != '_gpg']
    #compute signatures hashes
    hashes = _compute_hashes_db_(dir_list, DIR)
    #scan hashes file (GPG already verified)
    if type(signature) in (str,unicode):
        f=open(signature)
    elif hasattr(signature,'readline'):
        f=signature
    else:
        raise AssertionError
    a=f.readline()
    if a != "Version: 4\n":
        return ("UNSUPPORTED_VERSION",a)
    a=f.readline()
    while a:
        if a[:5] == "Role:":
            if a[5:].strip() != role :
                return ("ROLE_MISMATCH",a)
            a=f.readline()
        elif a[:6] == "Files:" :
            #parse files
            # each indented line is: md5 sha1 size name (space separated)
            a=f.readline()
            while a and a[0] in ( '\t' , ' ') :
                a=a.rstrip('\n')
                a=a.lstrip()
                a=a.split(' ')
                if VERBOSE > 3 : print ' checking hashes ',a
                (md5,sha1,le,na)=a
                if na not in dir_list:
                    return ('ABSENT',na)
                (cmd5,csha1,cle)=hashes[na]
                if int(le) != cle:
                    return ('SIZE',na)
                # check hashes
                if md5 != cmd5 :
                    return ('MD5',na)
                if sha1 != csha1 :
                    return ('SHA1',na)
                # remove checked entries, so leftovers can be reported below
                dir_list.remove(na)
                a=f.readline()
        elif VERBOSE > 2 :
            print ' signature header ignored: ', a
            a=f.readline()
        else:
            a=f.readline()
    #end parsing
    # any file present in DIR but not listed in the signature is suspicious
    if dir_list:
        return ("UNCHECKED",dir_list)
    return True

def _write_signature(db,filename,role):
    "starting from a database of hashes, see _compute_hashes_, it writes a signature file"
    f=open(filename,mode='w')
    ##this is the format of dpkg-sig, but is redundant, since the "date" and "signer"
    ##are already available thru the gpg signature
    #f.write("Version: 4\nSigner: \nDate: %s\nRole: %s\nFiles: \n" % (time.ctime(),role))
    ##and actually dpkg-sig will validate also a simpler file, so, lets save a few bytes
    f.write("Version: 4\nRole: %s\nFiles:\n" % (role,))
    for a in db:
        (m,s,l) = db[a]
        f.write('\t'+m+" "+s+" "+str(l)+" "+a+"\n")
    f.close()

def sign_delta(delta, db, role="maker"):
    "write the hashes db as a signature file, GPG-clearsign it, and append it to the delta (an AR archive)"
    TD = abspath(tempfile.mkdtemp(prefix='debdelta',dir=TMPDIR))
    try:
        _write_signature(db,TD+'/_temp',role)
        p=subprocess.Popen(gpg_sign_command() +['--output',TD+'/_gpg'+role,TD+'/_temp'],
stdin=open(os.devnull), close_fds=True) p.wait() if p.returncode==0: r=system(("ar","qS",delta,TD+"/_gpg"+role),TD) except: rmtree(TD) raise rmtree(TD) if p.returncode: raise DebDeltaError('GnuPG fails to sign') if r: raise DebDeltaError('ar fails to add the signature') #################################################################### apply patch ########### info auxiliary routines def _info_patch_unzip_(TD): "unzip info and patch.sh" if os.path.exists(TD+'PATCH/info.gz'): system(('gunzip','PATCH/info.gz'),TD) if os.path.exists(TD+'PATCH/patch.sh.gz'): system(('gunzip','PATCH/patch.sh.gz'),TD) elif os.path.exists(TD+'PATCH/patch.sh.bz2'): system(('bunzip2','PATCH/patch.sh.bz2'),TD) elif os.path.exists(TD+'PATCH/patch.sh.lzma'): if not os.path.exists('/usr/bin/unlzma'): raise DebDeltaError('This patch needs lzma. Please install the Debian package "lzma".',retriable=True) system(('unlzma','PATCH/patch.sh.lzma'),TD) elif os.path.exists(TD+'PATCH/patch.sh.xz'): if not os.path.exists('/usr/bin/unxz'): raise DebDeltaError('This patch needs xz. 
Please install the Debian package "xz-utils".',retriable=True)
        system(('unxz','PATCH/patch.sh.xz'),TD)

def get_info_slow(delta,T=None):
    "extract the info from `delta` by unpacking its AR members (slow but robust path); uses/creates a temp dir"
    if T:
        TD=T
    else:
        TD=tempo()
    if TD[-1] != '/':
        TD = TD + '/'
    delta=abspath(expanduser(delta))
    system(('ar','x',delta,'info','info.gz','patch.sh','patch.sh.gz','patch.sh.bz2','patch.sh.lzma','patch.sh.xz'),
        TD+'/PATCH', ignore_output=True)
    _info_patch_unzip_(TD)
    info = _scan_delta_info_(TD)
    # only clean up the temp dir if we created it ourselves
    if T == None:
        rmtree(TD)
    return info

def get_info_fast(delta):
    "read the 'info' member by parsing the AR header of `delta` directly, without spawning 'ar'; returns None if not possible"
    f=open(delta)
    s=f.readline()
    if "!\n" != s :
        raise DebDeltaError('This is not a delta file: '+delta)
    # read the 60-byte AR member header of the first member
    s = f.read(60)
    if len(s) != 60 :
        print '(Warning, cannot get info from truncated: '+delta+' )'
        return None
    if s[:4] != 'info':
        #old style debdelta, with info in patch.sh
        if VERBOSE > 1 :
            print ' (Warning, cannot get info from old style: '+delta+' )'
        return None
    ##parse ar segment
    ## see /usr/include/ar.h
    if s[-2:] != '`\n' :
        print '(Warning, cannot get info from '+delta+' , format not known)'
        return None
    # bytes 48..57 of the header hold the decimal member size
    l=int(s[ -12:-2 ])
    s=f.read(l)
    if len(s) != l :
        print '(Warning, cannot get info from truncated: '+delta+' )'
        return None
    info= s.split('\n')
    f.close()
    return info

def get_info(delta,TD=None):
    "get the info lines of `delta`, trying the fast AR-header parse first, falling back to extraction"
    info=get_info_fast(delta)
    if info == None:
        info=get_info_slow(delta,TD)
    return info

def _scan_delta_info_(TD):
    "collect the info lines from an unpacked delta in TD (either the 'info' file or '#' comments in patch.sh)"
    info=[]
    if os.path.isfile(TD+'PATCH/info'):
        #new style debdelta, with info file
        p=open(TD+'PATCH/info')
        info=p.read().split('\n')
        p.close()
        if info[-1] == '':
            info.pop()
    else:
        #old style debdelta, with info in patch.sh
        p=open(TD+'PATCH/patch.sh')
        s=p.readline()
        s=p.readline()
        while s:
            if s[0] == '#' :
                s=de_n(s)
                info.append(s[1:])
            s=p.readline()
        p.close()
    return info

def info_2_db(info):
    "turn a list of 'Key: value' info lines into a dict; bare lines become flags set to True"
    params={}
    for s in info:
        if ':' in s:
            i=s.index(':')
            params[s[:i]] = s[i+2:]
        elif s:
            params[s] = True
    return params

########### other auxiliary routines

def patch_check_tmp_space(params,olddeb):
    "check that TMPDIR has enough free space to apply the delta; returns True or an explanatory message"
    if type(params) != dict:
        params=info_2_db(params)
    if 'NEW/Installed-Size' not in params or 'OLD/Installed-Size'
not in params: print '(Warning... Installed size unknown...)' return True free=freespace(TMPDIR) if free == None : return True free = free / 1024 if olddeb == '/': instsize=int(params['NEW/Installed-Size']) #the last action of the script is to gzip the data.tar, so if 'NEW/Size' in params : instsize += int(params['NEW/Size']) / 1024 else: instsize = instsize * 1.8 else: instsize=int(params['NEW/Installed-Size'])+int(params['OLD/Installed-Size']) instsize += 2**13 if free < instsize : return _('not enough disk space (%(free)dkB) in directory %(dir)s for applying delta (needs %(size)dkB)') % \ {'free' : int(free), 'dir' : TMPDIR, 'size' : instsize} else: return True def scan_diversions(): f=open('/var/lib/dpkg/diversions') d={} a=1 while 1: a=f.readline() if not a: break a=de_n(a) b=de_n(f.readline()) p=de_n(f.readline()) d[a]=(b,p) f.close() return d ###################################################### debforensic extract #in base-passwd 3.5.11 #/usr/share/base-passwd/passwd.master base_passwd="""root::0:0:root:/root:/bin/bash daemon:*:1:1:daemon:/usr/sbin:/bin/sh bin:*:2:2:bin:/bin:/bin/sh sys:*:3:3:sys:/dev:/bin/sh sync:*:4:65534:sync:/bin:/bin/sync games:*:5:60:games:/usr/games:/bin/sh man:*:6:12:man:/var/cache/man:/bin/sh lp:*:7:7:lp:/var/spool/lpd:/bin/sh mail:*:8:8:mail:/var/mail:/bin/sh news:*:9:9:news:/var/spool/news:/bin/sh uucp:*:10:10:uucp:/var/spool/uucp:/bin/sh proxy:*:13:13:proxy:/bin:/bin/sh www-data:*:33:33:www-data:/var/www:/bin/sh backup:*:34:34:backup:/var/backups:/bin/sh list:*:38:38:Mailing List Manager:/var/list:/bin/sh irc:*:39:39:ircd:/var/run/ircd:/bin/sh gnats:*:41:41:Gnats Bug-Reporting System (admin):/var/lib/gnats:/bin/sh nobody:*:65534:65534:nobody:/nonexistent:/bin/sh""" base_passwd_db={} base_passwd_anti_db={} for a in base_passwd.split('\n'): a=a.split(':') base_passwd_db[a[0]]=int(a[2]) base_passwd_anti_db[int(a[2])]=a[0] base_group="""root:*:0: daemon:*:1: bin:*:2: sys:*:3: adm:*:4: tty:*:5: disk:*:6: lp:*:7: mail:*:8: news:*:9: 
uucp:*:10: man:*:12: proxy:*:13: kmem:*:15: dialout:*:20: fax:*:21: voice:*:22: cdrom:*:24: floppy:*:25: tape:*:26: sudo:*:27: audio:*:29: dip:*:30: www-data:*:33: backup:*:34: operator:*:37: list:*:38: irc:*:39: src:*:40: gnats:*:41: shadow:*:42: utmp:*:43: video:*:44: sasl:*:45: plugdev:*:46: staff:*:50: games:*:60: users:*:100: nogroup:*:65534:""" base_group_db={} base_group_anti_db={} for a in base_group.split('\n'): a=a.split(':') base_group_db[a[0]]=int(a[2]) base_group_anti_db[int(a[2])]=a[0] # all code following return name,mode,tartype,uid,gid,uname,gname #adapted from tarfile.py, a Python module def stat_to_tar(name): "returns name,mode,tartype,uid,gid,uname,gname,data" statres = os.lstat(name) stmd = statres.st_mode data = None if stat.S_ISREG(stmd): tartype = tarfile.REGTYPE # here ideally we should SHA1 the file ; # but this is done elsewhere for performance, # and to have multi_hash in the future elif stat.S_ISDIR(stmd): tartype = tarfile.DIRTYPE elif stat.S_ISFIFO(stmd): tartype = tarfile.FIFOTYPE elif stat.S_ISLNK(stmd): tartype = tarfile.SYMTYPE data = os.readlink(name) elif stat.S_ISCHR(stmd): tartype = tarfile.CHRTYPE elif stat.S_ISBLK(stmd): tartype = tarfile.BLKTYPE elif stat.S_ISSOCK(stmd): tartype = 'SOCKET' #SOCKETs are not supported in tar files else: raise TypeError if tartype in (tarfile.CHRTYPE, tarfile.BLKTYPE): data = str(os.major(statres.st_rdev))+' '+str( os.minor(statres.st_rdev)) uid,gid = statres.st_uid, statres.st_gid if uid in base_passwd_anti_db : uname = base_passwd_anti_db[uid] else: import pwd try: uname = pwd.getpwuid(uid)[0] except KeyError: uname = None if gid in base_group_anti_db : gname = base_group_anti_db[gid] else: import grp try: gname = grp.getgrgid(gid)[0] except KeyError: gname = None #07777 is used in tarfile.TarInfo.tobuf return name.lstrip('/'), stmd & 07777, tartype, uid, gid, uname, gname, data def tarinfo_to_ls(tartype,tarmode): "returns a string -rwxrwxrwx such as what ls -l prints " if ord(tartype) == 0 
: a='_' else: if tartype >= '0' and tartype <= '6' : a="-hlcbdp"[ord(tartype) - ord('0')] else: a='?' return a+tarfile.filemode(tarmode)[1:] def sha1_hash_file(f): s=hashlib.sha1() if type(f) in string_types: f=open(f) a=f.read(1024) while a: s.update(a) a=f.read(1024) f.close() return s.digest() def hash_to_hex(s): a='' for i in s: a=a+ ( '%02x' % ord(i) ) return a def forensics_rfc(o,db,bytar,controlfiles,files,conffiles,diverted=[],diversions={},localepurged=[],prelink_u_failed=[]): " this is invoked by do_patch_() as well as do_delta_() ; in the former case, by_tar=False" assert type(diversions) == dict if type(db) == dict: for a in sorted(db.keys()): if a[:3] == 'OLD': o.write(a[4:]+': '+db[a]+'\n') else: for a in sorted(db): if a[:3] == 'OLD': o.write(a[4:]+'\n') if diverted: o.write("Diversions:\n") for a in sorted(diverted): b,p = diversions[a] o.write(" From: "+a+'\n') o.write(" To: "+b+'\n') o.write(" By: "+p+'\n') if conffiles: o.write("Conffiles:\n") for a in sorted(conffiles): o.write(' '+a+'\n') for L,N in ((controlfiles,"Control"),(files,"Files")): o.write(N+":\n") for l in sorted(L): if bytar: name,mode,tartype,uid,gid,uname,gname,data=l tmpcopy=None divert=None else: name,divert,tmpcopy=l if os.path.exists(divert): fullname,mode,tartype,uid,gid,uname,gname,data=stat_to_tar(divert) else: fullname,mode,tartype,uid,gid,uname,gname,data='',0,'?',0,0,'?','?','?' 
if tartype == tarfile.REGTYPE: if tmpcopy and os.path.exists(tmpcopy): data=hash_to_hex(sha1_hash_file(tmpcopy)) elif os.path.exists(divert): data=hash_to_hex(sha1_hash_file(divert)) if name in ('.', '/', './', '/.') and tartype == tarfile.DIRTYPE: #skip root continue if uname == None: uname=str(uid) if gname == None: gname=str(gid) name=de_bar(name) o.write(' '+tarinfo_to_ls(tartype,mode)+" "+uname+' '+gname) if N == "Files" and tartype == tarfile.REGTYPE and name in conffiles: o.write(" [conffile]") if N == "Files" and tartype == tarfile.REGTYPE and name in localepurged: o.write(" [localpurged]") if N == "Files" and tartype == tarfile.REGTYPE and name in prelink_u_failed: o.write(" [prelink-u failed]") if divert and not os.path.exists(divert): o.write(" [missing file %r]" % divert) if tmpcopy: o.write(" [prelink-u]") o.write("\n "+name+"\n") if data!=None: o.write(" "+data+"\n") else: o.write(" \n") def tar_those(f): " tar multiple files in one tar (all in the same base directory!). Note that f may be a list of lists or strings or mixed." temptar=tempfile.mktemp(suffix='.tgz') tar=tarfile.open(name=temptar,mode='w:gz') for z in f: if type(z) in (list,tuple): for j in z: tar.add(j,arcname=os.path.basename(j)) elif type(z) in (str,unicode): tar.add(z,arcname=os.path.basename(z)) else: DebDeltaError(' internal error m92ksy') tar.close() return temptar def forensic_send(f,forensic=FORENSIC): " note that f must be a list of lists (or None)" assert type(f) == list if not forensic : if f: sys.stderr.write(_('(Faulty delta. 
Please consider retrying with the option "--forensic=http" ).')+'\n') return if not f: return if all([(z == None) for z in f]): print 'Sorry, no forensic logs were generated' return if forensic[:4] in ('mutt','mail') or forensic[:7] == 'icedove' or forensic[:10]=='thunderbird': email=EMAIL if ':' in forensic: a=forensic.find(':') email == forensic[a:] forensic=forensic[:a] print _("There were faulty deltas.")+' '+_("Now invoking the mail sender to send the logs.") if forensic in ('mutt','mail'): raw_input( _('(hit any key)') ) args=[] for z in f: if z: for j in z: args+=['-a',j] subprocess.call(['mutt',email,'-s','delta_failures']+args) else: temptar=tempfile.mktemp(suffix='.tgz') tar=tarfile.open(name=temptar,mode='w:gz') for z in f: if z: for j in z: tar.add(j,arcname=os.path.basename(j)) tar.close() args="to=%s,subject=delta_failures,attachment='file:///%s'" % (email,temptar) subprocess.call([forensic,'-compose',args]) return elif forensic[:4] == 'http': print _("There were faulty deltas.")+' '+_('Sending logs to server.') temptar=tempfile.mktemp(suffix='.tgz') tar=tarfile.open(name=temptar,mode='w:gz') for z in f: if z: for j in z: tar.add(j,arcname=os.path.basename(j)) tar.close() #http://atlee.ca/software/poster import urllib, urllib2, httplib, poster poster.streaminghttp.register_openers() datagen, headers = poster.encode.multipart_encode({'auth_userid':'debdelta','auth_password':'slartibartfast',"thefile": open(temptar, "rb")}) # Create the Request object request = urllib2.Request("http://debdelta.debian.net:7890/receive", datagen, headers) # Actually do the request, and get the response print ' '+_('Server answers:'),repr(urllib2.urlopen(request).read()) return else: sys.stderr.write(_('Faulty delta. Please send by email to %s the following files:\n') % EMAIL) for z in f: if z: sys.stderr.write(' '+string.join(z,' ')+'\n') return sys.stderr.write(_('(Faulty delta. 
Please consider retrying with the option "--forensic=http" ).')+'\n') def elf_info(f): "returns (is_elf, ei_class, ei_data, ei_osabi, e_type)" import struct elfheader=open(f).read(32) if len(elfheader) == 32: #parse as specified in /usr/include/elf.h from libelf-dev EI_CLASS={1:'ELFCLASS32', 2:'ELFCLASS64'} EI_DATA={1:'ELFDATA2LSB', # 2's complement, little endian 2:'ELFDATA2MSB'} # 2's complement, big endian EI_OSABI={0:'ELFOSABI_SYSV',# UNIX System V ABI 1:'ELFOSABI_HPUX', 2:'ELFOSABI_NETBSD', 3:'ELFOSABI_LINUX', #fixme insert other values 9:'ELFOSABI_FREEBSD', 12:'ELFOSABI_OPENBSD', 97:'ELFOSABI_ARM'} #fixme what is ET_LOOS , ET_HIOS , ET_LOPROC, ET_HIPROC ?? ET_TYPE={1:'ET_REL', #Relocatable file 2:'ET_EXEC', #Executable file 3:'ET_DYN', #Shared object file 4:'ET_CORE'} #Core file ei_magic, ei_class, ei_data, ei_version, ei_osabi, ei_abiversion = \ struct.unpack_from('4sBBBBB',elfheader) e_type, e_machine, e_version = struct.unpack_from('HHI',elfheader,16) #FIXME I think I am getting ei_osabi wrong.. 
it is always 0 is_elf = '\x7fELF' == ei_magic #and ei_class in (1,2) and \ # ei_version == 1 and \ # ei_data in (1,2) and e_type>0 and e_machine>0 and e_version>0 return is_elf, EI_CLASS.get(ei_class), \ EI_DATA.get(ei_data), EI_OSABI.get(ei_osabi), ET_TYPE.get(e_type) #, e_machine, e_version else: return False, 0, 0, 0, 0 def parse_prelink_conf(): " fixme , currently unused and incomplete " prelinked_dirs=[] prelinked_blacklist=[] prelinked_blacklist_glob=[] for a in open('/etc/prelink.conf'): if a[0] == '#': continue a=a.strip() b=a.split() if len(b) != 2: print ' (sorry this line of /etc/prelink.conf cannot be parsed currently: "'+a+'")' continue if '-b' == b[0]: if '/' in b[1]: prelinked_blacklist.append(b[1]) else: prelinked_blacklist_glob.append(b[1]) elif '-l' == b[0]: prelinked_dirs.append(b[1]) ############ do_patch def do_patch(delta,olddeb,newdeb, info=None, diversions=None, do_gpg=DO_GPG): runtime={} T=None try: T=tempo() r=do_patch_(delta,olddeb,newdeb, T, runtime, info=info, diversions=diversions, do_gpg=do_gpg) except: if T : rmtree(T) if newdeb and os.path.exists(newdeb): os.unlink(newdeb) raise rmtree(T) return r def do_patch_(delta, olddeb, newdeb, TD, runtime, info=None, diversions=None, do_gpg=DO_GPG, do_progress=DO_PROGRESS): import thread, threading if TD[-1] != '/': TD = TD + '/' HAVE_PRELINK=os.path.exists('/usr/sbin/prelink') # some people purge locales w/o using 'localepurge' , see e.g. 
http://bugs.debian.org/619086 #HAVE_LOCALEPURGE=os.path.exists('/etc/locale.nopurge') or os.path.exists('/usr/sbin/localepurge') delta=abspath(delta) newdebshortname='-' if newdeb: newdebshortname=newdeb newdeb=abspath(newdeb) if olddeb != '/': olddeb=abspath(olddeb) elif diversions == None: diversions=scan_diversions() start_sec = time.time() runtime['patchprogress']=0 check_is_delta(delta) if olddeb != '/': check_deb(olddeb) temp_name, temp_err_name, ret=system(('ar','xvo',delta), TD+'/PATCH', return_output=True, ignore_output=True) if ret : raise DebDeltaError('Cannot extract from '+delta) ar_list_delta=[a[4:] for a in open(temp_name).read().split('\n') if a] os.unlink(temp_name) os.unlink(temp_err_name) runtime['patchprogress']=1 is_signed=False for a in ar_list_delta: if a[:4] == '_gpg': r = verify_signature(TD+'/PATCH/'+a,TD+'/PATCH') if True != r: die(delta+": the signature file "+a+" fails as follows: "+repr(r)) is_signed=True if VERBOSE > 1 : print ' The signature by "'+a[4:]+'" is correctly verified for ',delta if not is_signed: if do_gpg: die(_("Delta is not signed:")+' '+delta) elif do_gpg != None: print _("WARNING, delta is not signed:")+' '+delta runtime['patchprogress']=2 _info_patch_unzip_(TD) if not os.path.isfile(TD+'PATCH/patch.sh'): die("Error. File `%s' is not a delta file." 
% delta) os.symlink(minigzip,TD+'minigzip') os.symlink(minibzip2,TD+'minibzip2') #lets scan parameters, to see what it does and what it requires if info == None : info=_scan_delta_info_(TD) params=info_2_db(info) runtime['patchprogress']=3 #this is not needed in preparing the patch, but may help in forensic conf_files=[] z='/var/lib/dpkg/info/'+params['OLD/Package']+'.conffiles' if FORENSIC and os.path.isfile(z): #note that filenames do not have leading / conf_files=[de_bar(p) for p in open(z).read().split('\n') if p] del z ### s=patch_check_tmp_space(params,olddeb) if s != True: raise DebDeltaError('Sorry, '+s, True ) if olddeb != '/': os.symlink(olddeb,TD+'/OLD.file') #unpack the old control structure, if available os.mkdir(TD+'/OLD/CONTROL') #unpack control.tar.gz system(('ar','p',TD+'OLD.file','control.tar.gz','|','tar','-xzp','-f','-','-C',TD+'OLD/CONTROL'),TD) #then we check for the conformance if olddeb != '/' and 'OLD/Size' in params: olddebsize = os.stat(olddeb)[ST_SIZE] if olddebsize != int(params['OLD/Size']): raise DebDeltaError('Old deb size is '+str(olddebsize)+' instead of '+params['OLD/Size']) runtime['patchprogress']=4 if DEBUG > 1 : #this is currently disabled, since 'dpkg -s' is vey slow (~ 1.6 sec) dpkg_params={} b=params['OLD/Package'] if olddeb == '/' : p=my_popen_read('env -i dpkg -s '+b) else: p=open(TD+'OLD/CONTROL/control') scan_control(p,params=dpkg_params,prefix='OLD') p.close() if olddeb == '/' : if 'OLD/Status' not in dpkg_params: die('Error: package %s is not known to dpkg.' % b) if dpkg_params['OLD/Status'] != 'install ok installed' : die('Error: package %s is not installed, status is %s.' 
% ( b , dpkg_params['OLD/Status'] ) ) for a in params: if a[:3] == 'OLD' and a != 'OLD/Installed-Size' and a != 'OLD/Size': if a not in dpkg_params: die('Error parsing old control file , parameter %s not found' % a) elif params[a] != dpkg_params[a] : die( 'Error : in delta , '+a+' = ' +params[a] +\ '\nin old/installed deb, '+a+' = ' +dpkg_params[a]) del b,p #cannot delete 'a', python raise a SyntaxError runtime['patchprogress']=5 ### some auxiliary routines, separated to make code more readable def dpkg_L_faster(pa,ar,diversions): "Scan dpkg -L . 'diversions' must be prepared by scan_diversions() . Returns list of pairs of files ,and list of diverted files. " s=[] diverted=[] n='/var/lib/dpkg/info/'+pa+':'+ar+'.list' if not DPKG_MULTIARCH or not os.path.exists(n): n='/var/lib/dpkg/info/'+pa+'.list' f=open(n) while 1: a=f.readline() if not a: break a=de_n(a) if a in diversions: b,p= diversions[a] if p != pa: s.append((a,b)) diverted.append(a) else: s.append((a,a)) else: s.append((a,a)) f.close() return s,diverted def dpkg_L(pa,ar): "Scan dpkg -L . Currently unused, see previous function." 
def dpkg_L(pa, ar):
    """Scan 'dpkg-query -L' output for package 'pa' (architecture 'ar').

    Returns (s, diverted): 's' is a list of (original_name, actual_name)
    pairs, 'diverted' lists the original names that are diverted elsewhere.
    Currently unused, see previous function.
    """
    #sys.stderr.write('INTERNAL WARNING: USING OBSOLETE dpkg_L\n')
    s = []
    diverted = []
    if DPKG_MULTIARCH:
        p = my_popen_read('env -i dpkg-query -L ' + pa + ':' + ar)
    else:
        p = my_popen_read('env -i dpkg-query -L ' + pa)
    a = p.readline()
    while a:
        a = de_n(a)
        #support diversions
        if a[:26] == 'package diverts others to:':
            # BUGFIX: must advance to the next line before 'continue',
            # otherwise the loop spins forever on this annotation line.
            a = p.readline()
            continue
        # BUGFIX: parenthesize the 'or'; without parentheses this parsed as
        # '(s and ...) or (a[:20] == ...)' and an initial
        # 'locally diverted to:' line would s.pop() from an empty list.
        if s and (a[:11] == 'diverted by' or a[:20] == 'locally diverted to:'):
            # the annotation refers to the previously listed file: rewrite
            # its actual on-disk name to the diversion target
            orig, divert = s.pop()
            i = a.index(':')
            divert = a[i + 2:]
            s.append((orig, divert))
            diverted.append(orig)
        else:
            s.append((a, a))
        a = p.readline()
    p.close()
    return s, diverted
os.path.exists(tmpcopy): if VERBOSE > 4 : print ' (prelink failed, symlinking ',divert,' to ',tmpcopy,')' os.symlink(divert, tmpcopy) prelink_u_failed.append(de_bar(orig)) unprelink=False elif VERBOSE > 4 : print ' (prelink failed, but file was copied)' thestat = os.statvfs(tmpcopy) if out[-39:] == 'does not have .gnu.prelink_undo section': if DEBUG: sys.stderr.write('!!'+repr(out)+'\n') elif (thestat.f_bsize * thestat.f_bavail / 1024) < 50000 : sys.stderr.write('!!Prelink -u failed, it needs at least 50000KB of free disk space\n') prelink_u_failed.append(de_bar(orig)) unprelink=False else: sys.stderr.write('!!Prelink -u failed on %s : %s\n' % (tmpcopy,out)) prelink_u_failed.append(de_bar(orig)) unprelink=False prelink_time += time.time() else: if VERBOSE > 3 : print ' symlinking ',divert,' to ',a os.symlink(divert, tmpcopy) if unprelink and FORENSIC: #unfortunately the script will delete the 'tmpcopy', so we hardlink it z=tempfile.mktemp(prefix=TD) os.link(tmpcopy,z) file_triples.append((orig,divert,z)) else: file_triples.append((orig,divert,None)) elif not os.path.exists(divert) and os.path.islink(divert): file_triples.append((orig,divert,None)) if VERBOSE > 1 : print ' Broken symlink? ',divert elif not os.path.exists(divert): file_triples.append((orig,divert,None)) if VERBOSE : print ' Disappeared file? ',divert for z in ('locale','man','gnome/help','omf','doc/kde/HTML'): w='/usr/share/'+z if orig[:len(w)] == w: localepurged.append(de_bar(orig)) else: file_triples.append((orig,divert,None)) if VERBOSE > 3 : print ' not symlinking ',divert,' to ',orig return file_triples, localepurged, prelink_u_failed, diverted, prelink_time, prelink_datasize def chmod_add(n,m): "same as 'chmod ...+... 
n '" om=S_IMODE(os.stat(n)[ST_MODE]) nm=om | m if nm != om : if VERBOSE > 2 : print ' Performing chmod ',n,oct(om),oct(nm) os.chmod(n,nm) def _fix_data_tree_(TD): for (dirpath, dirnames, filenames) in os.walk(TD+'OLD/DATA'): chmod_add(dirpath, S_IRUSR | S_IWUSR| S_IXUSR ) for i in filenames: i=os.path.join(dirpath,i) if os.path.isfile(i): chmod_add(i, S_IRUSR | S_IWUSR ) for i in dirnames: i=os.path.join(dirpath,i) chmod_add(i, S_IRUSR | S_IWUSR| S_IXUSR ) #initialize, just in case control_file_triples=[] file_triples=[] localepurged=[] prelink_u_failed=[] diverted=[] prelink_time=0 prelink_datasize=0 ###see into parameters: the patch may need extra info and data runtime['patchprogress']=6 prelink_time=None for a in params: if 'needs-old' == a: if olddeb == '/': die('This patch needs the old version Debian package') elif 'old-data-tree' == a : os.mkdir(TD+'/OLD/DATA') if olddeb == '/': file_triples, localepurged, prelink_u_failed, diverted, prelink_time, prelink_datasize=\ _symlink_data_tree(params['OLD/Package'],params['OLD/Architecture'],TD,diversions,runtime) else: ar_list_old= list_ar(TD+'OLD.file') if 'data.tar.bz2' in ar_list_old: system(('ar','p',TD+'OLD.file','data.tar.bz2','|','tar','-xp','--bzip2','-f','-','-C',TD+'OLD/DATA'), TD) elif 'data.tar.gz' in ar_list_old: system(('ar','p',TD+'OLD.file','data.tar.gz','|','tar','-xp','-z','-f','-','-C',TD+'OLD/DATA'), TD) elif 'data.tar.lzma' in ar_list_old: if not os.path.exists('/usr/bin/lzma'): raise DebDeltaError('This patch needs lzma. Please install the Debian package "lzma".',retriable=True) system(('ar','p',TD+'OLD.file','data.tar.lzma','|','unlzma','-c','|','tar','-xpf','-','-C',TD+'OLD/DATA'), TD) elif 'data.tar.xz' in ar_list_old: if not os.path.exists('/usr/bin/xz'): raise DebDeltaError('This patch needs xz. 
Please install the Debian package "xz-utils".',retriable=True) system(('ar','p',TD+'OLD.file','data.tar.xz','|','unxz','-c','|','tar','-xpf','-','-C',TD+'OLD/DATA'), TD) else: assert(0) _fix_data_tree_(TD) elif 'old-control-tree' == a: if olddeb == '/': if not os.path.isdir(TD+'OLD/CONTROL'): os.mkdir(TD+'OLD/CONTROL') p=params['OLD/Package'] a=params['OLD/Architecture'] for b in dpkg_keeps_controls : z='/var/lib/dpkg/info/' + p + ':' + a + '.'+b if not DPKG_MULTIARCH or not os.path.exists(z): z='/var/lib/dpkg/info/' + p +'.'+b if os.path.exists(z): os.symlink(z,TD+'OLD/CONTROL/'+b) control_file_triples.append((b,z,None)) del z,p #cannot delete 'a', python raise a SyntaxError #else... we always unpack the control of a .deb elif 'needs-xdelta3' == a: if not os.path.exists('/usr/bin/xdelta3'): raise DebDeltaError('This patch needs xdelta3. Please install the Debian package "xdelta3".',retriable=True) elif 'needs-xdelta3-fifo' == a: # not doing a specific check, I am using debian/control Conflicts if not os.path.exists('/usr/bin/xdelta3'): raise DebDeltaError('This patch needs xdelta3, at least version 3.0y. Please install the Debian package "xdelta3".',retriable=True) elif 'needs-xdelta' == a: if not os.path.exists('/usr/bin/xdelta'): raise DebDeltaError('This patch needs xdelta. Please install the Debian package "xdelta".',retriable=True) elif 'needs-bsdiff' == a: if not os.path.exists('/usr/bin/bsdiff'): raise DebDeltaError('This patch needs bsdiff. Please install the Debian package "bsdiff".',retriable=True) elif 'needs-lzma' == a: if not os.path.exists('/usr/bin/lzma'): raise DebDeltaError('This patch needs lzma. Please install the Debian package "lzma".',retriable=True) elif 'needs-xz' == a: if not os.path.exists('/usr/bin/xz'): raise DebDeltaError('This patch needs xz. 
Please install the Debian package "xz-utils".',retriable=True) elif 'needs-minibzip2' == a: pass #its your lucky day elif a[:6] == 'needs-': raise DebDeltaError('patch says "'+a+"' and this is unsupported. Get a newer debdelta.",retriable=True) elif params[a] == True: print 'WARNING patch says "'+a+'" and this is unsupported. Get a newer debdelta.' if localepurged and not DEBUG: #actually we cannot be 100% sure that the delta really needs those files, but it is quite plausible raise DebDeltaError('Error, '+str(len(localepurged))+' locale files are absent.') runtime['patchprogress']=12 script_time = - time.time() this_deb_format=DEB_FORMAT if DEB_FORMAT != 'deb' and 'NEW/data.tar' not in params: #this patch does not support streaming this_deb_format='deb' if this_deb_format == 'preunpacked' : #need a fifo os.mkfifo(TD+'data.pipe') temp_err_name_fd, temp_err_name = tempfile.mkstemp(prefix='debdeltaE') temp_name_fd, temp_name = tempfile.mkstemp(prefix='debdeltaO') cmd=[SHELL,'-e','PATCH/patch.sh'] if this_deb_format == 'unzipped': cmd+=['unzipped'] elif this_deb_format == 'preunpacked': cmd+=['piped'] env={'PATH':os.getenv('PATH')} F=subprocess.Popen(cmd, cwd=TD, bufsize=4096,close_fds=True, stdin=open(os.devnull),env=env, stderr=temp_err_name_fd, stdout=temp_name_fd) ### data used by the preunpacked method data_md5=None # md5 of uncompressed data.tar tar_status=[] # should be [True] if 'preunpacked' went fine md5_status=[] # idem #this list contains tuples of (unpacked_temporary_filename, real_filename, owner, group, tartype, mode, mtime, linkname) preunpacked_filelist=[] def do_cleanup(): for a in preunpacked_filelist: pass ##CHEAT we are not (yet) writing anything to disk #if a[0]: os.unlink(a[0]) if this_deb_format == 'preunpacked' : #do progress reporting and unpacking in filesystem def do_extension(): return '_'+str(random.randint(1,9999)).rjust(4,'0')+'_debdelta_preunpacked' def do_pipe_md5(i,o,rm,ms,ts): try: a=i.read(1024) while a and (ts == [] or ts == 
[True]) : rm.update(a) o.write(a) a=i.read(1024) #TODO implement progress reporting here as well o.close() ms.append(True) except: import sys ms.append(sys.exc_info()) if DEBUG: print ' do_pipe_md5 crashed:'+repr(ms) def do_tar(i,fl,s): try: dt=tarfile.open(mode='r|',fileobj=i) for tarmember in dt: if tarmember.isreg(): n='/'+tarmember.name+do_extension() while os.path.exists(n) : #wont overwrite existing stuff, never ever n='/'+tarmember.name+do_extension() else: n='' a=(n, tarmember.name, tarmember.uname, tarmember.gname,\ tarmember.type, tarmember.mode, tarmember.mtime, tarmember.linkname) fl.append(a) if n: pass ##CHEAT we are not writing anything today! #datatar.extract(tarmember,path=n) #successfully untarred! s.append(True) except:#catch problems such as "out of disk space" or corrupted data import sys s.append(sys.exc_info()) if DEBUG: print ' do_tar crashed:'+repr(s) #flush input (note that do_pipe_md5 will soon stop writing) no it seems useless #i.read() try: datapipe=open(TD+'data.pipe') rolling_md5=hashlib.md5() (piper,pipew)=os.pipe() md5_thread=threading.Thread(target=do_pipe_md5, args=(open(TD+'data.pipe'),os.fdopen(pipew,'w'),rolling_md5,\ md5_status,tar_status)) tar_thread=threading.Thread(target=do_tar, args=(os.fdopen(piper),preunpacked_filelist,tar_status)) #yeah maybe using two threads is more complex than strictly needed md5_thread.daemon=True md5_thread.start() tar_thread.daemon=True tar_thread.start() #join back the md5 md5_thread.join() data_md5=rolling_md5.hexdigest() if md5_status == [True] and params['NEW/data.tar'][:32] == data_md5: tar_thread.join() if tar_status == [True]: F.wait() #write data_list a=open(TD+'data_list','w') a.write('Files:\n') for (unpacked_temporary_filename, real_filename, owner,\ group, tartype, mode, mtime, linkname) in preunpacked_filelist: ## to convert 'tar' type into 'ls' type if tartype >= '0' and tartype <= '6' : tartype="-hlcbdp"[ord(tartype) - ord('0')] else: tartype='?' 
print 'WARNING unsupported tar type '+repr(tartype)+' for: '+repr(real_filename) #if tartype < '0' or tartype > '6' : # tartype='?' # print 'WARNING unsupported tar type '+repr(tartype)+' for: '+repr(real_filename) mode=oct(mode).rjust(4,'0') a.write(' %s %s %s %s %s\n %s\n %s\n %s\n' % (tartype,mode,owner,group,mtime,\ unpacked_temporary_filename,real_filename,linkname)) a.close() #append it into deb system(['ar','q','NEW.file', 'data_list'], TD) except: do_cleanup() raise else: #progress reporting for deb_format != 'preunpacked' runtime['patchprogress']=12 if 'NEW/Size' in params: NEW_size=int(params['NEW/Size']) while None == F.poll(): if os.path.exists(TD+'NEW.file'): a=os.path.getsize(TD+'NEW.file') progress=(int(12.0 + 84.0 * a / NEW_size)) else: progress=12 runtime['patchprogress']=progress time.sleep(0.1) if do_progress: sys.stderr.write('P %2d%% %s\r' % (progress, newdebshortname)) F.wait() if do_progress and terminalcolumns: #clean up sys.stderr.write(' ' * terminalcolumns + '\r') ret=F.returncode os.close(temp_err_name_fd) os.close(temp_name_fd) script_time += time.time() # for --format='preunpacked' this time also include data MD5 runtime['patchprogress']=97 #helper for debugging def tempos(f): if os.path.getsize(temp_name): f.append(temp_name) if os.path.getsize(temp_err_name): f.append(temp_err_name) if not FORENSIC: def fore(): return None elif olddeb != '/': def fore(): f=[delta,olddeb] tempos(f) return f else: def fore(): temp_fore_name='' f=[] tempos(f) try: (temp_fd,temp_fore_name) = tempfile.mkstemp(prefix="debforensic_"+params['NEW/Package']+"_") temp_file=os.fdopen(temp_fd,'w') temp_file.write('Delta: '+delta+'\n') temp_file.write('DeltaSHA1: '+hash_to_hex(sha1_hash_file(delta))+'\n') temp_file.write('LocalePurgedFilesN: '+str(len(localepurged))+'\n') temp_file.write('PrelinkUFailedN: '+str(len(prelink_u_failed))+'\n') if ret: temp_file.write('PatchExitCode: '+str(ret)+'\n') 
forensics_rfc(temp_file,params,False,control_file_triples,file_triples,conf_files, diverted,diversions,localepurged,prelink_u_failed) #copy short content here and remove from list for i in copy(f): if os.path.getsize(i) < 2000: f.remove(i) temp_file.write('PatchLogFile_'+str(i)+'_content:\n') for ll in open(i): temp_file.write(' '+repr(ll)+'\n') else: temp_file.write('PatchLogFileIs: '+str(i)+'\n') temp_file.close() except OSError: #Exception,s: die('!!While creating forensic '+temp_fore_name+' error:'+str(s)+'\n') f.append(temp_fore_name) return f if ret: if localepurged: raise DebDeltaError('"debdelta" is incompatible with "localepurge".') else: f=fore() raise DebDeltaError('error in patch.sh.',logs=f) #then we check for the conformance if this_deb_format == 'deb': if 'NEW/Size' in params: newdebsize = os.stat(TD+'NEW.file')[ST_SIZE] if newdebsize != int(params['NEW/Size']): f=fore() raise DebDeltaError('new deb size is '+str(newdebsize)+' instead of '+params['NEW/Size'],logs=f) if DO_MD5: if 'NEW/MD5sum' in params: if VERBOSE > 1 : print ' verifying MD5 for ',os.path.basename(newdeb or delta) m= compute_md5(open(TD+'NEW.file')) if params['NEW/MD5sum'] != m : f=fore() raise DebDeltaError(' MD5 mismatch, '+repr(params['NEW/MD5sum'])+' != ' + repr(m) , logs=f) else: print ' Warning! 
no MD5 was verified for ',os.path.basename(newdeb or delta) elif this_deb_format == 'unzipped' : if DO_MD5: m=compute_md5(subprocess.Popen('ar p "%s" control.tar.gz | zcat' % (TD+'NEW.file'), stdout=subprocess.PIPE,shell=True).stdout) if params['NEW/control.tar'][:32] != m: f=fore() raise DebDeltaError('MD5 mismatch for control.tar' , logs=f) m=compute_md5(subprocess.Popen('ar p "%s" data.tar' % (TD+'NEW.file'), stdout=subprocess.PIPE,shell=True).stdout) if params['NEW/data.tar'][:32] != m: f=fore() raise DebDeltaError('MD5 mismatch for data.tar', logs=f) elif this_deb_format == 'preunpacked' : if tar_status != [True]: f=fore() do_cleanup() raise DebDeltaError("something bad happened in tar: "+repr(tar_status[0][1]), logs=f) #todo format me better if md5_status != [True]: f=fore() do_cleanup() raise DebDeltaError("something bad happened in md5: "+repr(md5_status[0][1]), logs=f) #todo format me better #if DO_MD5: #actually we always do MD5 m=compute_md5(subprocess.Popen('ar p "%s" control.tar.gz | zcat' % (TD+'NEW.file'), stdout=subprocess.PIPE,shell=True).stdout) if params['NEW/control.tar'][:32] != m: f=fore() do_cleanup() raise DebDeltaError('MD5 mismatch for control.tar', logs=f) if params['NEW/data.tar'][:32] != data_md5: f=fore() do_cleanup() raise DebDeltaError('MD5 mismatch for data.tar', logs=f) else: assert('unimplemented'=='') os.unlink(temp_name) os.unlink(temp_err_name) runtime['patchprogress']=99 if newdeb: shutil.move(TD+'NEW.file',newdeb) if DEB_FORMAT != this_deb_format: #this should not be localized print 'Warning, created as standard deb: ',newdeb end_sec = time.time() elaps=(end_sec - start_sec) if VERBOSE : if newdeb: debsize = os.stat(newdeb)[ST_SIZE] else: debsize = os.stat(olddeb)[ST_SIZE] #this printout uses kibibytes, and not SizeToStr, to ease statistics print ' ' + _('Patching done, time %(time).2fsec, speed %(speed)dk/sec') % \ {'time' : elaps, 'speed' : (debsize / 1024 / (elaps+.001))} + \ ' ' + _('(script %(time).2fsec 
def do_delta(olddeb,newdeb,delta):
  """Create the delta between 'olddeb' and 'newdeb' and store it at 'delta'.

  Wraps do_delta_(): builds the delta in a temporary directory and under the
  temporary name delta+'_tmp_', optionally GPG-signs it, then moves it into
  place (backing up any previous 'delta' as delta+'~').  On failure all
  partial files are removed and the exception is re-raised.
  Returns the tuple produced by do_delta_().
  """
  T=None
  try:
    T=tempo()  # temporary work directory
    # remove any stale leftover from a previously interrupted run
    if os.path.exists(delta+'_tmp_') : os.unlink(delta+'_tmp_')
    r=do_delta_(olddeb,newdeb,delta+'_tmp_',TD=T)
    (deltatmp, percent, elaps, info, gpg_hashes) = r
    info_hashes=append_info(deltatmp,info)
    if DO_GPG:
      gpg_hashes['info']=info_hashes
      sign_delta(deltatmp,gpg_hashes)
    # keep a backup of a pre-existing delta, then atomically move the new one in
    if os.path.exists(delta) : os.rename(delta,delta+'~')
    os.rename(deltatmp,delta)
  except:
    # NOTE(review): this also unlinks a pre-existing 'delta' target on
    # failure (its backup 'delta~' may survive) -- confirm intended
    if delta and os.path.exists(delta): os.unlink(delta)
    if delta and os.path.exists(delta+'_tmp_'): os.unlink(delta+'_tmp_')
    if T : rmtree(T)
    raise
  else:
    if T : rmtree(T)
  return r
To recreate one of the complex members, there are two main steps in the script: (1) rebuild the member, uncompressed (2) recompress the member The part (2) is managed by script.zip_piped(). The part (1) is more complex: (1a) there is a first subshell where data from the old version of the deb are piped to stdout (moreover some gzipped files may be transparently unzipped, delta-ed and re-gzipped, see delta_gzipped_files) (1b) the former (1a) is "piped" into the delta backend, to transform the old data into new. If the delta-backend is 'bsdiff' then the above process (1a,b) is repeated in chunks (indeed bsdiff cannot manage large files w/o exausting all your memory!) All the (chunks of) steps (1a,b) are in a subshell, and its result stdout is piped in (2). """ if TD[-1] != '/': TD = TD + '/' import fnmatch start_sec = time.time() #I do not like global variables but I do not know of another solution global bsdiff_time, bsdiff_datasize bsdiff_time = 0 bsdiff_datasize = 0 olddeb=abspath(olddeb) check_deb(olddeb) os.symlink(olddeb,TD+'/OLD.file') olddebsize = os.stat(olddeb)[ST_SIZE] newdeb=abspath(newdeb) check_deb(newdeb) os.symlink(newdeb,TD+'/NEW.file') newdebsize = os.stat(newdeb)[ST_SIZE] free=freespace(TD) if free and free < newdebsize : raise DebDeltaError('Error: not enough disk space in '+TD, True) delta=abspath(delta) #generater for numbered files def a_numb_file_gen(): deltacount = 0 while 1: yield str(deltacount) deltacount+=1 a_numb_file=a_numb_file_gen() a_numb_patch=a_numb_file_gen() ####################### class Script ######################## class Script: """This class helps create the script 'patch.sh' that is the core of the delta. The script recreates the new deb. See documentation of do_delta_() for details. 
  class Script:
    """This class helps create the script 'patch.sh' that is the core of the delta.
    The script recreates the new deb. See documentation of do_delta_() for details.

    NOTE(review): this class closes over names of the enclosing do_delta_()
    scope (TD, a_numb_file, info_append, append_NEW_file, script) -- it is
    only usable from within that function.
    """
    def __init__(self, delta_uses_infifo):
      #start writing script
      self.fd=open(TD+'PATCH/patch.sh','w')
      self.fd.write('#!/bin/bash -e\n')
      # name of the 'ar' member currently being written, or None
      self.member=None
      # name of the chunk currently being piped, or None
      self.current_chunk_name=None
      self.delta_uses_infifo=delta_uses_infifo
      if delta_uses_infifo : #create the fifo as input for xdelta3
        self.the_fifo=a_numb_file.next()
        self.fd.write("mkfifo %s\n" % self.the_fifo)
      else:
        self.the_fifo=None
      #this is used when recompressing data.tar.zx
      self.xz_parameters=None
    def write(self,s):
      "verbatim write in the script"
      self.fd.write(s)
    def close(self):
      # remove the fifo (if any) at end of script, then close patch.sh
      if self.the_fifo:
        self.fd.write("rm %s\n" % self.the_fifo)
      self.fd.close()
    def zip(self, n, cn, newhead=None):
      """inverts the unzip() function ;
      optionally, forces .gz header (to fight changes in libz)
      This is obsolete, not efficient, left as a compatibility layer."""
      self.fd.write('cat "'+n+'" | ')
      self.zip_piped(cn, newhead)
      self.fd.write(" > '"+n+cn+"' && rm '"+n+"'\n")
    def zip_piped(self, cn, newhead=None):
      "inverts the unzip() function, with piped behaviour"
      if cn == '.gz' :
        if newhead:
          # echo the forced gzip header, then drop the one minigzip produces
          s=prepare_for_echo(newhead)
          self.fd.write("($E '"+ s +"' && ./minigzip -9 | tail -c +"+str(len(newhead)+1)+')')
        else:
          self.fd.write('./minigzip -9')
      elif cn == '.bz2' :
        info_append('needs-minibzip2')
        self.fd.write('./minibzip2 -9')
      elif cn == '.lzma' :
        info_append('needs-lzma')
        self.fd.write('lzma -9')
      elif cn == '.xz' :
        info_append('needs-xz')
        if self.xz_parameters==None:
          self.fd.write('xz -c')
        else:
          self.fd.write('xz -c '+self.xz_parameters)
      else:
        assert(0)
    def start_member(self, ar_line, newname, extrachar):
      "start a new 'ar' member"
      self.member=newname
      self.ar_line=ar_line
      self.extrachar=extrachar
      assert(self.current_chunk_name==None)
      self.fd.write('{\n')
    def end_member(self):
      assert(self.member)
      self.member=None
      self.fd.write('}\n')
    def start_rebuilding(self):
      "starts the first part of the delta/recompressing pipe (for a 'ar' member)"
      self.fd.write('(')
    def end_rebuilding(self):
      "ends the first part of the delta/recompressing pipe"
      self.fd.write(')|') #pipe the delta/recompressing pipe
    def recompressing(self,new_filename,new_filename_ext,new_file_zip_head):
      # second half of the pipe: write the ar header, recompress the data
      # flowing in, and append everything to NEW.file
      self.fd.write('(')
      append_NEW_file(self.ar_line)
      script.zip_piped(new_filename_ext,new_file_zip_head)
      self.fd.write(' >> NEW.file\n') #end delta tar
      if self.extrachar :
        # ar members of odd size carry one padding character
        append_NEW_file(self.extrachar)
      self.fd.write(')\n')
    def recompressing_by_arg(self,new_filename,new_filename_ext,new_file_zip_head,new_file_size):
      "flexible recompressing for data.tar , depending on first argument passed to the script"
      self.fd.write('( if test "$1" = "" \n then\n')
      self.recompressing(new_filename,new_filename_ext,new_file_zip_head)
      self.fd.write('elif test "$1" = "unzipped" \n then\n')
      #http://en.wikipedia.org/wiki/Ar_(Unix)
      assert(new_filename == 'NEW/data.tar')
      # hand-build the ar header for the uncompressed member: name field is
      # 16 chars, size field 10 chars, terminated by the magic '`\n'
      ar_line_unzipped='data.tar'.ljust(16)+self.ar_line[16:48]+str(new_file_size).ljust(10)+'`\n'
      append_NEW_file(ar_line_unzipped)
      #there is no extra char, tar is 512b blocks
      self.fd.write('cat >> NEW.file \n elif test "$1" = "piped" \n then cat >> data.pipe \n fi )\n')
    def start_chunk(self, current_chunk_name):
      "start the pipe to create the chunk. The chunk is always piped"
      self.fd.write('(')
      self.current_chunk_name=current_chunk_name
    def end_chunk(self, current_chunk_name):
      """this ends the 'data part' of a chunk, and writes the old data somewhere;
      a successive script code (generated by delta_files() ) will then delta it
      to transform old data into new data."""
      assert(self.current_chunk_name==current_chunk_name)
      if self.the_fifo:
        self.fd.write(') > '+self.the_fifo+'&\n') #write to fifo, background
      else:
        self.fd.write(') > '+current_chunk_name+'\n') #write chunk
      self.current_chunk_name=None
    def md5_check_file(self, n, md5=None):
      "add a md5 check in the script (this is done only if a lot -d are passed on cmdline)"
      if md5==None:
        assert(os.path.isfile(TD+n))
        md5=compute_md5(TD+n)
      print " adding extra MD5 for ",n
      self.fd.write('echo "'+md5+' '+n+'" | md5sum -c --quiet\n')
      # if it is too quiet, try
      #script.write('echo "'+md5+' '+n+'" | md5sum -c --quiet || { echo '+repr(n)+' is corrupt ; exit 1 ;}\n')
'skip' in opt: debdelta_conf_skip += debdelta_conf.get(s,'skip').split(';') break if VERBOSE > 1 : print ' debdelta.conf says we will skip: ', repr(debdelta_conf_skip) gpg_hashes = {} if DO_MD5 : # compute a MD5 of NEW deb newdeb_md5sum=compute_md5(TD+'NEW.file') info.append('NEW/MD5sum: '+ newdeb_md5sum) else: newdeb_md5sum=None if NEEDSOLD : #this delta needs the old deb info.append('needs-old') else: info.append('old-data-tree') info.append('old-control-tree') #do we use a fifo as input for xdelta3 delta_uses_infifo = ('xdelta3-fifo' not in DISABLED_FEATURES) and (USE_DELTA_ALGO == 'xdelta3') #Note that there is just one object instanced from class Script() script=Script(delta_uses_infifo) a=USE_DELTA_ALGO if a == 'xdelta-bzip': a='xdelta' if not os.path.exists('/usr/bin/'+a): raise DebDeltaError('please install the package "'+a+'".', retriable=True) if delta_uses_infifo: info.append('needs-xdelta3-fifo') else: info.append('needs-'+a) del a #### check for disk space if 'NEW/Installed-Size' in params and 'OLD/Installed-Size' in params: free=freespace(TD) instsize=int(params['NEW/Installed-Size']) + int(params['OLD/Installed-Size']) if free and free < ( instsize * 1024 + + 2**23 + MAXMEMORY / 6 ) : raise DebDeltaError(' Not enough disk space (%dkB) for creating delta (needs %dkB).' % \ ( int(free/1024) , instsize ) , True ) ############# check for conffiles a=TD+'/OLD/CONTROL/conffiles' if os.path.exists(a): p=open(a) #files do not have leading / old_conffiles=[ de_bar(a) for a in p.read().split('\n') if a] p.close() else: old_conffiles=[] ## a=TD+'/OLD/CONTROL/list' ## if os.path.exists(a): ## p=open(a) ## for a in p: ## a=de_bar(de_n(a)) ## for j in debdelta_conf_skip: ## if fnmatch(a,j): ## old_conffiles.append(a) #OK, this abuses the name of the var a bit ## print ' REPR skip ',repr(a) ## else: ## print ' The old debian package ',olddeb,' does not contain a file list?!?' 
def shell_not_allowed(name):
    "Strings that I do not trust to inject into the shell script; maybe I am a tad too paranoid..."
    #FIXME should use it , by properly quoting for the shell script
    for forbidden in ('"', "'", '\\', '`'):
        if forbidden in name:
            return True
    return False
#zdelta #http://cis.poly.edu/zdelta/ elif algo == 'zdelta': system(('~/debdelta/zdelta-2.1/zdc',o,n,p),TD) script.write('~/debdelta/zdelta-2.1/zdu '+o+' '+p+' '+n+'\n') #bdelta #http://deltup.sf.net elif algo == 'bdelta': system(('~/debdelta/bdelta-0.1.0/bdelta',o,n,p),TD) script.write('~/debdelta/bdelta-0.1.0/bpatch '+o+' '+n+' '+p+'\n') #diffball #http://developer.berlios.de/projects/diffball/ elif algo == 'diffball': system(('~/debdelta/diffball-0.7.2/differ',o,n,p),TD) script.write('~/debdelta/diffball-0.7.2/patcher '+o+' '+p+' '+n+'\n') #rdiff elif algo == 'rdiff': system(('rdiff','signature',o,'sign_file.tmp'),TD) system(('rdiff','delta','sign_file.tmp',n,p),TD) script.write('rdiff patch '+o+' '+p+' '+n+'\n') #xdelta3 elif algo == 'xdelta3' : system(('xdelta3','-9','-R','-D','-n','-S','djw','-s',o,n,p),TD) if infifo : o = infifo #use fifo as input if outpiped: this_delta_outpiped=True script.write('xdelta3 -d -R -D -c -s '+o+' '+p+'\n') else: script.write('xdelta3 -d -R -D -s '+o+' '+p+' '+n+'\n') ## according to the man page, ## bsdiff uses memory equal to 17 times the size of oldfile ## but , in my experiments, this number is more like 12. ##But bsdiff is sooooo slow! elif algo == 'bsdiff' : # not ALLOW_XDELTA or ( osize < (MAXMEMORY / 12)): system(('bsdiff',o,n,p),TD) script.write('bspatch '+o+' '+n+' '+p+'\n') #seems that 'xdelta' is buggy on 64bit and different-endian machines #xdelta does not deal with different endianness! 
elif algo == 'xdelta-bzip' : system(('xdelta','delta','--pristine','--noverify','-0','-m'+str(int(MAXMEMORY/1024))+'k',o,n,p),TD) system('bzip2 -9 '+p,TD,(p,)) script.write('bunzip2 '+p+'.bz2 ; xdelta patch '+p+' '+o+' '+n+'\n') p += '.bz2' elif algo == 'xdelta' : system(('xdelta','delta','--pristine','--noverify','-9','-m'+str(int(MAXMEMORY/1024))+'k',o,n,p),TD) script.write('xdelta patch '+p+' '+o+' '+n+'\n') elif algo == 'jojodiff' : system(('~/debdelta/jdiff06/src/jdiff','-b',o,n,p),TD) script.write('~/debdelta/jdiff06/src/jpatch '+o+' '+p+' '+n+'\n') else: raise AssertionError(' unsupported delta algo ') return p, this_delta_outpiped def delta_files(o, n, outpiped=None, infifo=None): " compute delta of two files , and prepare the script consequently" nsize = os.path.getsize(TD+n) osize = os.path.getsize(TD+o) if VERBOSE > 1 : print ' compute delta for %s (%dkB) and %s (%dkB)' % \ (o,osize/1024,n,nsize/1024) # p = 'PATCH/'+a_numb_patch.next() tim = -time.time() # if DEBUG > 3 : script.md5_check_file(o) # if USE_DELTA_ALGO == 'bsdiff' and osize > ( 1.1 * (MAXMEMORY / 12)) and VERBOSE : print ' Warning, memory usage by bsdiff on the order of %dMb' % (12 * osize / 2**20) # p, this_delta_outpiped = delta_files__(o, n, p, USE_DELTA_ALGO, outpiped, infifo) #script.write(s) # if DEBUG > 2 and not this_delta_outpiped : script.md5_check_file(n) # tim += time.time() # global bsdiff_time, bsdiff_datasize bsdiff_time += tim bsdiff_datasize += nsize # if infifo: script.write('rm '+p+'\n') else: script.write('rm '+o+' '+p+'\n') ## how did we fare ? deltasize = os.path.getsize(TD+p) if VERBOSE > 1 : print ' delta is %3.2f%% of %s, speed: %dkB /sec' % \ ( ( deltasize * 100. / nsize ) , n, (nsize / 1024. 
/ ( tim + 0.001 ))) #save it patch_append(p[6:]) #clean up unlink(TD+o) return this_delta_outpiped def cmp_gz(o,n): "compare gzip files, ignoring header; returns first different byte (+-10), or True if equal" of=open(o) nf=open(n) oa=of.read(10) na=nf.read(10) if na[:3] != '\037\213\010' : print ' Warning: was not created with gzip: ',n nf.close() ; of.close() return 0 if oa[:3] != '\037\213\010' : print ' Warning: was not created with gzip: ',o nf.close() ; of.close() return 0 oflag=ord(oa[3]) if oflag & 0xf7: print ' Warning: unsupported .gz flags: ',oct(oflag),o if oflag & 8 : #skip orig name oa=of.read(1) while ord(oa) != 0: oa=of.read(1) l=10 nflag=ord(na[3]) if nflag & 0xf7: print ' Warning: unsupported .gz flags: ',oct(nflag),n if nflag & 8 : #skip orig name na=nf.read(1) s=na while ord(na) != 0: na=nf.read(1) s+=na l+=len(s) #print repr(s) while oa and na: oa=of.read(2) na=nf.read(2) if oa != na: return l l+=2 if oa or na: return l return True def delta_gzipped_files(o,n): "delta o and n, replace o with n" assert(o[-3:] == '.gz' and n[-3:] == '.gz') before=cmp_gz(TD+o,TD+n) if before == True: if VERBOSE > 3: print ' equal but for header: ',n return #compare the cost of leaving as is , VS the minimum cost of delta newsize=os.path.getsize(TD+n) if ( newsize - before + 10 ) < 200 : if VERBOSE > 3: print ' not worthwhile gunzipping: ',n return f=open(TD+n) a=f.read(10) f.close() if a[:3] != '\037\213\010' : print ' Warning: was not created with gzip: ',n return flag=ord(a[3]) # mostly ignored :-> orig_name='-n' if flag & 8: orig_name='-N' if flag & 0xf7: print ' Warning: unsupported .gz flags: ',oct(flag),n #a[4:8] #mtime ! ignored ! FIXME will be changed... #from deflate.c in gzip source code format=ord(a[8]) FAST=4 SLOW=2 #unfortunately intermediate steps are lost.... 
pack_level=6 if format == 0 : pass elif format == FAST : pack_level == 1 elif format == SLOW : pack_level == 9 else: print ' Warning: unsupported compression .gz format: ',oct(format),n return if a[9] != '\003' : if VERBOSE : print ' Warning: unknown OS in .gz format: ',oct(ord(a[9])),n #OK, it seems we can play our trick p='_tmp_' #unzip new file pro=subprocess.Popen(('zcat',TD+n),stdout=open(TD+p+'.new','w'), cwd=TD) pro.wait() if pro.returncode: raise DebDeltaError('argghh, zcat failed on us') #test our ability of recompressing l=[1,2,3,4,5,6,7,8,9] del l[pack_level] l.append(pack_level) l.reverse() best_r=0 best_flag=None for i in l: #force -n ... no problem with timestamps gzip_flags="-"+str(i) pro=subprocess.Popen(("gzip",'-c','-n',gzip_flags,TD+p+'.new'),stdout=open(TD+p+'.faked.gz','w')) pro.wait() if pro.returncode: DebDeltaError('Argh, gzip failed on us') r=cmp_gz(TD+n,TD+p+'.faked.gz') if r > best_r : best_r = r best_flag = gzip_flags if r == True: break if i == pack_level and VERBOSE > 3: print ' warning: wrong guess to re-gzip to equal file: ',gzip_flags,r,n if r != True: if VERBOSE > 1 : print ' warning: cannot re-gzip to equal file, best was %d / %d , "%s" : %s '%(best_r,newsize,best_flag,n) os.unlink(TD+p+".new") ; os.unlink(TD+p+'.faked.gz') return #actual delta of decompressed files pro=subprocess.Popen(('zcat',TD+o),stdout=open(TD+p+'.old','w'), cwd=TD) pro.wait() if pro.returncode: raise DebDeltaError('argh, zcat failed on us') script.write("zcat '"+o+"' > "+p+".old ; rm '"+o+"' \n") if VERBOSE > 2 : print ' ',n[9:],' (= to %d%%): ' % (100*before/newsize) , delta_files(p+'.old',p+'.new') script.write("gzip -c -n "+gzip_flags+" < "+p+".new > '"+o+"' ; rm "+p+".new\n") #replace the old file with the best that we can do re-gzipping the new file #this is important in the rest of the delta-ing process os.rename(TD+p+'.faked.gz',TD+o) if DEBUG > 1 : script.md5_check_file(o, compute_md5(TD+o)) def guess_xz_parameters(o, check=True): "tries to guess the 
parameters used to compress, returns a string of options ; if check=True and it fails, returns False " par='' crc='' z=tempfile.NamedTemporaryFile(suffix='.xz',delete=False) #unfortunately 'xz --list' does not work on pipes! shutil.copyfileobj(o,z) z.flush() #assert os.path.exists(z.name) #os.system('file '+z.name) #os.system('xz -cc --list '+z.name) b=subprocess.Popen(['xz','-vv','--robot','--list',z.name],stdout=subprocess.PIPE) for a in b.stdout: a=a.rstrip('\n') a=string.split(a,'\t') if a[0]=='block': if crc and crc != a[9]: print " warning : this xz -- compressed file was compressed with variable blocks crc ?! '%s' != '%s'" & (crc,a[9]) crc=a[9] if par and par != a[15]: print " warning : this xz -- compressed file was compressed with variable blocks options ?! '%s' != '%s'" & (par,a[15]) par=a[15] #print ' guessed par crc ',par,crc if crc: crc=crc.lower() if crc=='sha-256': crc='sha256' if crc not in ('crc32','crc64','sha256'): print ' Unknown XZ crc ',crc crc='' PARS=['-6e','-9','-9e'] if par: PARS.append(par) if check: redo=True while redo and PARS: par=PARS.pop() y=subprocess.Popen(['unxz','-c',z.name],stdout=subprocess.PIPE) w=['xz','-c'] if par: w.append(par) if crc: w+=['-C',crc] w.append('-') if VERBOSE > 2 : print ' Testing XZ options ',w w=subprocess.Popen(w,stdin=y.stdout,stdout=subprocess.PIPE,close_fds=True) y.stdout.close() redo=False c=subprocess.Popen(['cmp','-',z.name],stdin=w.stdout,stdout=open(os.devnull,'w'),close_fds=True) w.stdout.close() if c.wait() : redo=True if VERBOSE or DEBUG : print ' Tried XZ options but failed: ',par,crc if redo and not PARS: #print ' HO FINITO LE OPZIONI !' 
os.unlink(z.name) return False if crc: crc=' -C '+crc os.unlink(z.name) return par+crc ########### helper sh functions for script, for delta_tar() import difflib, re re_numbers=re.compile(r'^[0-9][0-9]*$') def file_similarity_premangle(fp): fps=fp.split('/') bns=fps[-1].split('.') j=len(bns)-1 #search first "non numeric" extension, and put it last while j >= 0 and re_numbers.match(bns[j]): j-=1 if j >= 0: a=bns.pop(j) r = fps[:-1] + bns + [a,] else: r = fps[:-1] + bns return r def files_similarity_score__noext__(oo,nn): ln=len(nn) lo=len(oo) l=0 while oo and nn: while oo and nn and oo[-1] == nn[-1]: oo=oo[:-1] nn=nn[:-1] if not oo or not nn: break while oo and nn and oo[0] == nn[0]: oo=oo[1:] nn=nn[1:] if not oo or not nn: break if len(nn) > 1 and oo[0] == nn[1]: l+=1 nn=nn[1:] if len(oo) > 1 and oo[1] == nn[0]: l+=1 oo=oo[1:] if not oo or not nn: break if oo[-1] != nn[-1]: oo=oo[:-1] nn=nn[:-1] l+=2 if not oo or not nn: break if oo[0] != nn[0]: oo=oo[1:] nn=nn[1:] l+=2 return (l +len(oo) + len(nn)) * 2.0 / float(ln+lo) def files_similarity_score__(oo,nn): oo=copy(oo) nn=copy(nn) if oo.pop() != nn.pop() : return 0.2 + files_similarity_score__noext__(oo,nn) else: return files_similarity_score__noext__(oo,nn) def files_similarity_score__difflib__(oo,nn): "compute similarity by difflib. Too slow." if oo == nn : return 0 d=difflib.context_diff(oo,nn,'','','','',0,'') d=[a for a in tuple(d) if a and a[:3] != '---' and a[:3] != '***' ] if oo[-1] != nn[-1] : #penalty for wrong extension return 0.2+float(len(d)) * 2.0 / float(len(oo)+len(nn)) else: return float(len(d)) * 2.0 / float(len(oo)+len(nn)) def files_similarity_score(oo,nn): if oo == nn : return 0 if type(oo) in string_types: oo=file_similarity_premangle(oo) if type(nn) in string_types: nn=file_similarity_premangle(nn) return files_similarity_score__(oo,nn) def fake_tar_header_2nd(): " returns the second part of a tar header , for regular files and dirs" # The following code was contributed by Detlef Lannert. 
# into /usr/lib/python2.3/tarfile.py MAGIC = "ustar" # magic tar string VERSION = "00" # version number NUL = "\0" # the null character parts = [] for value, fieldsize in ( ("", 100), # unfortunately this is not what DPKG does #(MAGIC, 6), #(VERSION, 2), # this is what DPKG does ('ustar \x00',8), ("root", 32), ("root", 32), ("%07o" % 0, 8), ("%07o" % 0, 8), ("", 155) ): l = len(value) parts.append(value + (fieldsize - l) * NUL) buf = "".join(parts) return buf fake_tar_2nd=fake_tar_header_2nd() fake_tar_2nd_echo=prepare_for_echo(fake_tar_2nd) script.write("FTH='"+fake_tar_2nd_echo+"'\n") script.write("E='echo -ne'\n") global time_corr time_corr=0 #################### vvv delta_tar vvv ########################### def delta_tar(old_filename, new_filename, CWD, old_forensic,\ skip=[], old_md5={}, new_md5={},\ chunked_p=(not delta_uses_infifo) ,debdelta_conf_skip=()): " compute delta of two tar files, and prepare the script consequently" assert( type(old_filename) in string_types or type(old_filename) == FunctionType ) script.write('ECR () { $E "$1" ; $E "${FTH}" ; cat OLD/'+CWD+'/"$1" ; rm OLD/'+CWD+'/"$1" ;}\n') script.write('EC () { $E "$1" ; $E "${FTH}" ; cat OLD/'+CWD+'/"$1" ;}\n') ###### uncompress and scan the old tar file, extract regular files if type(old_filename) in string_types : (old_filename,old_filename_ext) = unzip(old_filename) oldtar = tarfile.open(TD+old_filename, "r") else: old_filename_ext=None oldfileobj = old_filename() oldtar = tarfile.open(mode="r|", fileobj=oldfileobj) oldnames = [] oldtarinfos = {} for oldtarinfo in oldtar: oldname = de_bar(oldtarinfo.name) if old_forensic != None: #fixme : devices are not supported (but debian policy does not allow them) old_forensic.append([oldtarinfo.name,oldtarinfo.mode,oldtarinfo.type,\ oldtarinfo.uid,oldtarinfo.gid,oldtarinfo.uname,oldtarinfo.gname,oldtarinfo.linkname]) #this always happens #if VERBOSE > 3 and oldname != de_bar(oldname): # print ' filename in old tar has weird ./ in front: ' , oldname if 
not oldtarinfo.isreg(): if VERBOSE > 2 : print ' skipping old non-regular ',repr(oldname) continue if oldtarinfo.size == 0: if VERBOSE > 2 : print ' skipping old empty ',repr(oldname) continue if shell_not_allowed(oldname): if VERBOSE > 2 : print ' skipping non-allowed-name ',repr(oldname) continue for j in debdelta_conf_skip: if fnmatch.fnmatch(oldname,j): if VERBOSE > 2 : print ' skipping following as per rule ',repr(j) skip.append(oldname) break if oldname in skip: if VERBOSE > 2 : print ' skipping ',repr(oldname) if old_forensic != None: oldtar.extract(oldtarinfo,TD+"OLD/"+CWD ) old_forensic.append(old_forensic.pop()[:-1] + \ [hash_to_hex(sha1_hash_file(os.path.join(TD,"OLD",CWD,oldname)))]) continue oldnames.append(oldname) oldtarinfos[oldname] = oldtarinfo oldtar.extract(oldtarinfo,TD+"OLD/"+CWD ) if old_forensic != None: old_forensic.append(old_forensic.pop()[:-1] + \ [hash_to_hex(sha1_hash_file(os.path.join(TD,"OLD",CWD,oldname)))]) oldtar.close() if type(old_filename) in string_types : unlink(TD+old_filename) else: while oldfileobj.read(512): pass ###### scan the new tarfile, save info regarding regular files therein #save header part of new_filename, since it changes in newer versions f=open(TD+new_filename) new_file_zip_head=f.read(20) f.close() (new_filename,new_filename_ext) = unzip(new_filename) new_file_md5=compute_md5(TD+new_filename) new_file_size=os.path.getsize(TD+new_filename) info_append(new_filename+': '+new_file_md5+' '+str(new_file_size)) #### scan the new tarfile, compare to the old tar contents assert(0 == (new_file_size % 512)) newtar = tarfile.open(TD+new_filename, "r") newnames = [] newtarinfos = {} for newtarinfo in newtar: newname = newtarinfo.name #just curious to know t=newtarinfo.type a=newtarinfo.mode if VERBOSE and (( t == '2' and a != 0777 ) or \ ( t == '0' and ( (a & 0400 ) == 0 )) or \ ( t == '5' and ( (a & 0500 ) == 0 ))): print ' weird permission: ',newname,oct(a),repr(newtarinfo.type) ### if not newtarinfo.isreg(): continue 
if VERBOSE > 3 and newname != de_bar(newname): print ' filename in new tar has weird ./ in front: ' , newname newname = de_bar(newname) newnames.append(newname) newtarinfos[newname] = newtarinfo old_used={} correspondence={} ###### find correspondences between old tar and new tar contents global time_corr time_corr=-time.time() if VERBOSE > 2 : print ' finding correspondences for ', new_filename reverse_old_md5={} if old_md5: for o in old_md5: if o in oldnames: reverse_old_md5[old_md5[o]] = o else: #would you believe? many packages contain MD5 for files they do not ship... if VERBOSE > 1 and o not in skip: print ' hmmm... there is a md5 but not a file: ',o #database of databases of premangled old names , by "extension" and name oldnames_premangle={} for o in oldnames: om=file_similarity_premangle(o) a=om[-1] # "extension" if a not in oldnames_premangle: oldnames_premangle[a]={} oldnames_premangle[a][o]=om for newname in newnames: newtarinfo=newtarinfos[newname] oldname=None #ignore empty files if newtarinfo.size == 0: continue #try correspondence by MD5 if new_md5 and newname in new_md5: md5=new_md5[newname] if md5 in reverse_old_md5: oldname=reverse_old_md5[md5] if VERBOSE > 2 : if oldname == newname : print ' use identical old file: ',newname else: print ' use identical old file: ',oldname, newname #try correspondence by file name if oldname == None and newname in oldnames: oldname=newname if VERBOSE > 2 : print ' use same name old file: ',newname #try correspondence by file name and len similarity np=file_similarity_premangle(newname) ne=np[-1] # "extension" if oldname == None and ne in oldnames_premangle: basescore=1.6 nl=newtarinfo.size for o in oldnames_premangle[ne]: op=oldnames_premangle[ne][o] l=oldtarinfos[o].size sfile=files_similarity_score__noext__(op,np) slen = abs(float(l - nl))/float(l+nl) s=slen+sfile if VERBOSE > 3 : print ' name/len diff %.2f+%.2f=%.2f ' % (slen,sfile,s), o if s < basescore: oldname=o basescore=s if oldname and VERBOSE > 2 : 
print ' best similar ','%.3f' % basescore,newname,oldname if not oldname: if VERBOSE > 2 : print ' no correspondence for: ',newname continue #we have correspondence, lets store if oldname not in old_used: old_used[oldname]=[] old_used[oldname].append(newname) correspondence[newname]=oldname time_corr+=time.time() if VERBOSE > 1 : print ' time lost so far in finding correspondence %.2f' % time_corr ######### final pass : scan new tar, extract regular files, prepare deltas if VERBOSE > 2 : print ' scanning ',new_filename script.start_rebuilding() current_chunk_name=a_numb_file.next() script.start_chunk(current_chunk_name) mega_cat=open(TD+'/'+current_chunk_name,'w') #helper function def _append_(p, w, rm): mega_cat.write(w+fake_tar_2nd) f=open(TD+p+'/'+w) a=f.read(1024) while a: try: mega_cat.write(a) except OSError,s : raise DebDeltaError(' OSError (at _a_) while writing: '+str(s), True) a=f.read(1024) f.close() if rm: script.write("ECR '"+w+"'\n") unlink(TD+p+'/'+w) else: script.write("EC '"+w+"'\n") global something_backgrounded #FIXME I hate using globals for this :-( something_backgrounded=False #helper function def mega_cat_chunk(oldoffset,newoffset, background=True): global something_backgrounded p = a_numb_file.next() f=open(TD+new_filename) f.seek(oldoffset) of=open(TD+p,'w') l=oldoffset while l 2 : print ' provide also old file ', oldname yield oldname while 1: yield None some_old_file=some_old_file_gen() one_old_file=some_old_file.next() max_chunk_size = MAXMEMORY / 12 chunk_discount = 0.3 progressive_new_offset=0 for newtarinfo in newtar: #progressive mega_cat a=mega_cat.tell() if chunked_p and ((a >= max_chunk_size * chunk_discount) or \ (a >= max_chunk_size * chunk_discount * 0.9 and one_old_file ) or \ (a>0 and (a+newtarinfo.size) >= max_chunk_size * chunk_discount )): #provide some old unused files, if any while one_old_file: _append_("OLD/"+CWD, one_old_file, False) if mega_cat.tell() >= max_chunk_size * chunk_discount : break 
one_old_file=some_old_file.next() #write the chunk into a temporary mega_cat.close() script.end_chunk(current_chunk_name) #delta the chunk mega_cat_chunk(progressive_new_offset, newtarinfo.offset ) #start a new chunk current_chunk_name=a_numb_file.next() script.start_chunk(current_chunk_name) mega_cat=open(TD+'/'+current_chunk_name,'w') # progressive_new_offset=newtarinfo.offset chunk_discount = min( 1. , chunk_discount * 1.2 ) # name = de_bar( newtarinfo.name ) if newtarinfo.isdir(): #recreate also parts of the tar headers mega_cat.write(newtarinfo.name+fake_tar_2nd) script.write("$E '"+prepare_for_echo(newtarinfo.name)+"'\"${FTH}\"\n") if VERBOSE > 2 : print ' directory in new : ', name continue if not newtarinfo.isreg(): #recreate also parts of the tar headers mega_cat.write(newtarinfo.name+fake_tar_2nd) script.write("$E '"+prepare_for_echo(newtarinfo.name)+"'\"${FTH}\"\n") if VERBOSE > 2 : print ' not regular in new : ', name continue if newtarinfo.size == 0: #recreate also parts of the tar headers mega_cat.write(newtarinfo.name+fake_tar_2nd) script.write("$E '"+prepare_for_echo(newtarinfo.name)+"'\"${FTH}\"\n") if VERBOSE > 2 : print ' empty new file : ', name continue if name not in correspondence: #recreate also parts of the tar headers mega_cat.write(newtarinfo.name+fake_tar_2nd) script.write("$E '"+prepare_for_echo(newtarinfo.name)+"'\"${FTH}\"\n") if VERBOSE > 2: print ' no corresponding fil: ', name continue oldname = correspondence[name] mul=len( old_used[oldname]) > 1 #multiple usage if not mul and oldname == name and oldname[-3:] == '.gz' and \ newtarinfo.size > 120 and \ not ( new_md5 and name in new_md5 and old_md5 and name in old_md5 and \ new_md5[name] == old_md5[name]): newtar.extract(newtarinfo,TD+"NEW/"+CWD ) delta_gzipped_files("OLD/"+CWD+'/'+name,"NEW/"+CWD+'/'+name) if VERBOSE > 2 : print ' adding reg file: ', oldname, mul and '(multiple)' or '' _append_("OLD/"+CWD, oldname , not mul ) old_used[oldname].pop() #end of for loop #write the 
chunk into a temporary mega_cat.close() script.end_chunk(current_chunk_name) if os.path.exists(TD+'/OLD/'+CWD): rmtree(TD+'/OLD/'+CWD) if os.path.getsize(TD+'/'+current_chunk_name) > 0 : mega_cat_chunk(progressive_new_offset, os.path.getsize(TD+new_filename), background=False) else: #the (tail of the) new tar did not match anything in the old tar, nothing to delta p=verbatim(new_filename) script.write("cat '"+p+"'\n") script.end_rebuilding() #pipes the rebuilding part into the recompressing part if new_filename == 'NEW/data.tar': script.recompressing_by_arg(new_filename,new_filename_ext,new_file_zip_head,new_file_size) else: script.recompressing(new_filename,new_filename_ext,new_file_zip_head) #################### ^^^^ delta_tar ^^^^ ########################### ############ start computing deltas def append_NEW_file(s): 'appends some data to NEW.file' s=prepare_for_echo(s) script.write("$E '"+ s +"' >> NEW.file\n") #this following is actually #def delta_debs_using_old(old,new): ### start scanning the new deb newdeb_file=open(newdeb) # pop the "!\n" s = newdeb_file.readline() assert( "!\n" == s) append_NEW_file(s) #process all contents of old vs new .deb ar_list_old= list_ar(TD+'OLD.file') ar_list_new= list_ar(TD+'NEW.file') if forensic_file==None: control_forensic=None data_forensic=None else: control_forensic=[] data_forensic=[] for name in ar_list_new : newname = 'NEW/'+name system(('ar','x',TD+'NEW.file',name), TD+'/NEW/') script.xz_parameters=None newsize = os.stat(TD+newname)[ST_SIZE] if VERBOSE > 1: print ' studying ' , name , ' of len %dkB' % (newsize/1024) #add 'ar' structure ar_line = newdeb_file.read(60) if VERBOSE > 3: print ' ar line: ',repr(ar_line) assert( ar_line[:len(name)] == name and ar_line[-2] == '`' and ar_line[-1] == '\n' ) #sometimes there is an extra \n, depending if the previous was odd length newdeb_file.seek(newsize ,1) if newsize & 1 : extrachar = newdeb_file.read(1) else: extrachar = '' #add file to delta if newsize < 128: #file is too 
short to compute a delta, append_NEW_file(ar_line) p=open(TD+newname) append_NEW_file( p.read(newsize)) p.close() unlink(TD+newname) #pad new deb if extrachar : append_NEW_file(extrachar) elif not NEEDSOLD and name[:11] == 'control.tar' : script.start_member(ar_line, newname, extrachar) #(mm this is almost useless, just saves a few bytes) oldname = 'OLD/'+name system(('ar','x',TD+'OLD.file',name), TD+'OLD') ##avoid using strange files that dpkg may not install in /var...info/ skip=[] for a in os.listdir(TD+'OLD/CONTROL') : if a not in dpkg_keeps_controls: skip.append(a) #delta it delta_tar(oldname,newname,'CONTROL',control_forensic,skip) script.end_member() elif not NEEDSOLD and name[:8] == 'data.tar' : script.start_member(ar_line, newname, extrachar) # if 'data.tar.lzma' == name : info_append('needs-lzma') elif 'data.tar.xz' == name : info_append('needs-xz') script.xz_parameters=guess_xz_parameters(my_popen_read('cd '+TD+'; ar p NEW.file data.tar.xz')) if script.xz_parameters==False: raise DebDeltaError('Cannot guess XZ parameters') # if 'data.tar.gz' in ar_list_old : def x(): return my_popen_read('cd '+TD+'; ar p OLD.file data.tar.gz | gzip -cd') elif 'data.tar.bz2' in ar_list_old : def x(): return my_popen_read('cd '+TD+'; ar p OLD.file data.tar.bz2 | bzip2 -cd') elif 'data.tar.lzma' in ar_list_old : info_append('needs-lzma') def x(): return my_popen_read('cd '+TD+'; ar p OLD.file data.tar.lzma | unlzma -c') elif 'data.tar.xz' in ar_list_old : info_append('needs-xz') def x(): return my_popen_read('cd '+TD+'; ar p OLD.file data.tar.xz | unxz -c') else: assert(0) delta_tar(x,newname,'DATA',data_forensic,old_conffiles,old_md5,new_md5,\ debdelta_conf_skip=debdelta_conf_skip) del x script.end_member() elif not NEEDSOLD or name not in ar_list_old : #or it is not in old deb append_NEW_file(ar_line) patchname=verbatim(newname) script.write('cat '+patchname+' >> NEW.file ; rm '+patchname+'\n') #pad new deb if extrachar : append_NEW_file(extrachar) elif NEEDSOLD : 
append_NEW_file(ar_line) #file is long, and has old version ; lets compute a delta oldname = 'OLD/'+name system(('ar','x',TD+'OLD.file',name), TD+'OLD') script.write('ar p OLD.file '+name+' >> '+oldname+'\n') (oldname,co) = unzip(oldname) (newname,cn) = unzip(newname) delta_files(oldname,newname) script.zip(newname,cn) script.write('cat '+newname+cn+' >> NEW.file ; rm '+newname+cn+'\n') unlink(TD+newname) #pad new deb if extrachar : append_NEW_file(extrachar) del co,cn else: die('internal error j98') # put in script any leftover s = newdeb_file.read() if s: if VERBOSE > 2: print ' ar leftover character: ',repr(s) append_NEW_file(s) del s #this is done already from the receiving end if DEBUG > 2 and newdeb_md5sum : script.md5_check_file("NEW.file",md5=newdeb_md5sum) #script is done script.close() if forensic_file: forensics_rfc(forensic_file,info,True,control_forensic,data_forensic,old_conffiles) patchsize = os.stat(TD+'PATCH/patch.sh')[ST_SIZE] patch_files = [] if 'lzma' not in DISABLED_FEATURES and os.path.exists('/usr/bin/lzma'): system(('lzma','-q','-9','-k','PATCH/patch.sh'), TD) patch_files.append((os.path.getsize(TD+'PATCH/patch.sh.lzma'), 'lzma', 'patch.sh.lzma')) if 'xz' not in DISABLED_FEATURES and os.path.exists('/usr/bin/xz'): system(('xz','-q','-9','-k','PATCH/patch.sh'), TD) patch_files.append((os.path.getsize(TD+'PATCH/patch.sh.xz'), 'xz', 'patch.sh.xz')) system(('bzip2','-q','--keep','-9','PATCH/patch.sh'), TD) patch_files.append((os.path.getsize(TD+'PATCH/patch.sh.bz2'), 'bzip2', 'patch.sh.bz2')) system(('gzip','-q','-9','-n','PATCH/patch.sh'), TD) patch_files.append((os.path.getsize(TD+'PATCH/patch.sh.gz'), 'gzip', 'patch.sh.gz')) # Use the smallest compressed patch.sh patch_files.sort() if VERBOSE > 1 : print ' '+patch_files[0][1]+' wins on patch.sh' if patch_files[0][1] == 'lzma': info_append('needs-lzma') if patch_files[0][1] == 'xz': info_append('needs-xz') patch_append(patch_files[0][2]) del patch_files #OK, OK... 
this is not yet correct, since I will add the info file later on elaps = time.time() - start_sec info.append('DeltaTime: %.2f' % elaps) deltasize = os.stat(delta)[ST_SIZE] + 60 + sum(map(len,info)) percent = deltasize * 100. / newdebsize info.append('Ratio: %.4f' % (float(deltasize) / float(newdebsize)) ) if VERBOSE: #note that sizes are written as kB but are actually kibybytes, that is 1024 bytes print ' ' + _('delta is %(perc)3.1f%% of deb; that is, %(save)dkB are saved, on a total of %(tot)dkB.') % \ {'perc' : percent , 'save' : (( newdebsize -deltasize ) / 1024), 'tot' : ( newdebsize/ 1024)} print ' ' + _('delta time %(time).2f sec, speed %(speed)dkB /sec, (%(algo)s time %(algotime).2fsec speed %(algospeed)dkB /sec) (corr %(corrtime).2f sec)') % \ {'time' : elaps, 'speed' : newdebsize / 1024. / (elaps+0.001), 'algo' : USE_DELTA_ALGO, 'algotime' : bsdiff_time, 'algospeed' : bsdiff_datasize / 1024. / (bsdiff_time + 0.001) , 'corrtime' : time_corr } return (delta, percent, elaps, info, gpg_hashes) ##################################################### compute many deltas def do_deltas(debs): exitstatus=0 if not debs: print _('Warning, no non-option arguments, debdeltas does nothing.') if not DEBUG: return elif not OLD and not CLEAN_DELTAS: print _('Warning, no --old arguments, debdeltas will not generate any deltas.') if not DEBUG: return original_cwd = os.getcwd() start_time = time.time() try: import apt_pkg if 'init_system' in dir(apt_pkg): apt_pkg.init_system() elif 'InitSystem' in dir(apt_pkg): apt_pkg.InitSystem() else: assert 'fegrwq' == 0 if 'VersionCompare' in dir(apt_pkg): from apt_pkg import VersionCompare elif 'version_compare' in dir(apt_pkg): from apt_pkg import version_compare as VersionCompare else: assert 'lopadf' == 0 except ImportError: raise DebDeltaError('python module "apt_pkg" is missing. 
Please install python-apt', retriable=True) info_by_pack_arch={} def info_by_pack_arch_add(f): pack = f['Package'] arch = f['Architecture'] if (pack,arch) not in info_by_pack_arch : info_by_pack_arch[ (pack,arch) ]=[] info_by_pack_arch[ (pack,arch) ].append( f ) def iterate_Packages(packages, use_debian_822=True): fields=('Package','Architecture','Version','Filename') for f in fields: intern(f) packages=abspath(packages) assert os.path.isfile(packages) assert os.path.basename(packages) in ('Packages', 'Packages.gz','Packages.bz2','Packages.xz') dir=os.path.dirname(packages) dir=dir.split('/') try: a=dir.index('dists') except ValueError: sys.stderr.write('Error: pathname "%s" does not contain "dists"\n' % packages) return base = string.join(dir[:a],'/') # cache=cache_same_dict(packages, fields) if DO_CACHE and cache.exists: for i in cache: i['Basepath'] = base yield i if not cache.broken: return # if packages[-3:] == '.gz': import gzip F=gzip.GzipFile(packages) SP=None elif packages[-4:] == '.bz2': import bz2 F=bz2.BZ2File(packages) SP=None elif packages[-3:] == '.xz': SP=subprocess.Popen(["xzcat",packages], stdout=subprocess.PIPE, stdin=open(os.devnull), close_fds=True) F=SP.stdout else: F=open(packages) SP=None # if debian_deb822 and use_debian_822: #use faster implementation #P=debian_deb822.Packages(F,fields=fields) for a in debian_deb822.Packages.iter_paragraphs(sequence=F,shared_storage=False,fields=fields): if DO_CACHE and not cache.exists: cache.write(a) a['Basepath'] = base yield a if SP: F.read() SP.wait() return # of,pack,vers,arch=None,None,None,None for l in F: l=l.rstrip('\n') if l[:9] == 'Package: ': pack=l[9:] elif l[:14] == 'Architecture: ': arch = l[14:] elif l[:9] == 'Version: ': vers = l[9:] elif l[:10] == 'Filename: ': of=l[10:] elif l == '': if of == None or pack == None or vers == None or arch == None: print 'Warning, skipping incomplete record in index:',of,pack,vers,arch continue if of[-4:] == '.udeb': if VERBOSE > 2 : print ' skip udeb: ' 
continue a={} a['Filename'] = of a['Package'] = pack a['Architecture'] = arch a['Version'] = vers if DO_CACHE and not cache.exists: cache.write(a) a['Basepath'] = base yield a of,pack,vers,arch=None,None,None,None if SP: F.read() SP.wait() def scan_deb_dir(d, debname, label, lazy): assert (debname == None or type(debname) in string_types) and type(label) in string_types if not os.path.isdir(d): print 'Error, skip non dir:',d return if lazy: scan=scan_deb_byfile_lazy else: scan=scan_deb_byfile for n in os.listdir(d): if n[-4:] != '.deb': continue if debname != None and debname != n.split('_')[0]: continue a=scan(os.path.join(d,n)) a['Label'] = label info_by_pack_arch_add(a) def scan_deb_byfile_lazy(f): n=os.path.basename(f).split('_') a={} a['File']=f a['Package']=n[0] #version cannot be trusted, due to epochs (deleted in filenames) #it is read using scan_deb_bydict, later on a['Architecture']=n[2][:-4] return a info_by_file = {} #cache to speed up scanning file packages def scan_deb_byfile(f): #DEBUG: assert( os.path.isfile(f) ) if f in info_by_file and 'Version' in info_by_file[f]: #already scanned in non-lazy mode return info_by_file[f] a={} a['File']=f p=my_popen_read('ar p "'+f+'" control.tar.gz | tar -x -z -f - -O ./control') scan_control(p,params=a) p.close() return a def scan_deb_bydict(a): p=my_popen_read('ar p "'+a['File']+'" control.tar.gz | tar -x -z -f - -O ./control') scan_control(p,params=a) p.close() return a scanned_delta_dirs=set() def scan_delta_dir(d,debname=None): if (d,debname) in scanned_delta_dirs or (d,None) in scanned_delta_dirs : return if not os.path.isdir(d) : if VERBOSE > 2 and DEBUG : print ' No such delta dir: ',d scanned_delta_dirs.add((d,None)) #trick, if aint there no need to retry return assert debname == None or type(debname) in string_types scanned_delta_dirs.add((d,debname)) for n in os.listdir(d): if debname != None and debname != n.split('_')[0]: continue scan_delta( os.path.join(d,n) ) # contains list of triples 
# (filename,oldversion,newversion) — maps (package, arch) to the deltas
# already present on disk for that package.
old_deltas_by_pack_arch={}

def scan_delta(f):
    """Record an existing delta file *f* into old_deltas_by_pack_arch.

    Recognizes the three artifact suffixes produced by delta creation:
    '.debdelta' (success), '.debdelta-too-big' and '.debdelta-fails' (stamps).
    """
    assert( os.path.isfile(f) )
    if f[-9:] == '.debdelta' :
        a=f[:-9]
    elif f[-17:] == '.debdelta-too-big' :
        a=f[:-17]
    elif f[-15:] == '.debdelta-fails' :
        a=f[:-15]
    else:
        return
    # basename layout: name_oldver_newver_arch
    a=os.path.basename(a)
    a=a.split('_')
    pa=a[0]
    ar=a[3]
    if (pa,ar) not in old_deltas_by_pack_arch:
        old_deltas_by_pack_arch[ (pa,ar) ]=[]
    ov=version_demangle(a[1])
    nv=version_demangle(a[2])
    if (f,ov,nv) not in old_deltas_by_pack_arch[ (pa,ar) ]:
        old_deltas_by_pack_arch[ (pa,ar) ].append( (f, ov, nv ) )

def delta_dirname(f,altdir):
    "f=directory, altdir=ALT or DIR with // convention. Returns augmented dirname"
    if altdir != None:
        if altdir[-2:] == '//' :
            # '//' means: mirror the pool hierarchy of f under altdir
            if '../' in f:
                print 'Warning: cannot combine .. and // ! Saving in ', altdir
                return altdir
            #os.path.join has a weird behaviour with absolute paths!
            f=f.lstrip('/')
            return os.path.join(altdir[:-2], f)
        else:
            return altdir
    else:
        #this happens when DIR is not set, and the delta goes in the same directory as the deb
        return f

ALT_DOUBLEDIR=[] # directories ending in //

# Register every --alt argument (Packages index, single deb, or directory).
# '//'-directories are deferred: they are scanned per-package later on.
for arg in ALT:
    if os.path.isfile(arg):
        if os.path.basename(arg) in ('Packages', 'Packages.gz','Packages.bz2'):
            for a in iterate_Packages(arg):
                a['Label'] = 'ALT'
                #DEBUG a['Packages'] = arg
                info_by_pack_arch_add(a)
        elif arg[-4: ] == '.deb' :
            a=scan_deb_byfile_lazy(arg)
            if a:
                a['Label'] = 'ALT'
                info_by_pack_arch_add(a)
        else:
            print 'Error! skipping file --alt ',arg
    elif os.path.isdir(arg):
        if arg[-2:] != '//' :
            scan_deb_dir(arg, None, 'ALT', True)
        else:
            ALT_DOUBLEDIR.append(arg)
    else:
        print 'Error, ignored --alt ',arg

# Register every --old argument; unlike --alt these are scanned non-lazily
# (their full control data, including Version, is needed to pick delta pairs).
for arg in OLD:
    if os.path.isfile(arg):
        if os.path.basename(arg) in ('Packages', 'Packages.gz','Packages.bz2'):
            for a in iterate_Packages(arg):
                a['Label'] = 'OLD'
                #DEBUG a['Packages'] = arg
                info_by_pack_arch_add(a)
        elif arg[-4: ] == '.deb' :
            a=scan_deb_byfile(arg)
            if a:
                a['Label'] = 'OLD'
                info_by_pack_arch_add(a)
        else:
            print 'Error! skipping file --old ',arg
    elif os.path.isdir(arg):
        # no // support yet
        scan_deb_dir(arg, None, 'OLD', False)
    else:
        print 'Error, ignored --old ',arg

#scan cmdline arguments and prepare list of debs and deltas
for arg in debs:
    if os.path.isfile(arg):
        if os.path.basename(arg) in ('Packages', 'Packages.gz','Packages.bz2'):
            for a in iterate_Packages(arg):
                a['Label'] = 'CMDLINE'
                #note that, if a file is scanned more than once, the last label is CMDLINE
                #DEBUG a['Packages'] = arg
                ofd = os.path.dirname(a['Filename'])
                pa = a['Package']
                info_by_pack_arch_add(a)
                # look for alternative debs / existing deltas next to this deb
                for alt in ALT_DOUBLEDIR:
                    scan_deb_dir(delta_dirname(ofd,alt), pa , 'ALT', True)
                if CLEAN_DELTAS:
                    scan_delta_dir(delta_dirname(ofd,DIR), pa)
        elif arg[-4: ] == '.deb':
            a=scan_deb_byfile(arg)
            if a :
                a['Label'] = 'CMDLINE'
                info_by_pack_arch_add(a)
                ofd = os.path.dirname(arg)
                for alt in ALT_DOUBLEDIR:
                    scan_deb_dir(delta_dirname(ofd,alt), a['Package'], 'ALT', True)
                if CLEAN_DELTAS:
                    scan_delta_dir(delta_dirname(ofd,DIR), a['Package'])
        else:
            print 'Error! skipping cmdline argument: ',arg
    elif os.path.isdir(arg):
        scan_deb_dir(arg, None, 'CMDLINE', False)
        for alt in ALT_DOUBLEDIR:
            scan_deb_dir(delta_dirname(arg,alt), None, 'ALT', True)
        if CLEAN_DELTAS:
            scan_delta_dir(delta_dirname(arg,DIR))
    else:
        print 'Error, skipping cmd line argument: ',arg

if VERBOSE > 1 : print ' total parsing time: %.1f ' % ( -start_time + time.time())

def go_fishing(deb,others):
    "Find a package filepath if missing, save it in deb['File']. deb = dict representing package ; others: list of such dicts."
    # (body of go_fishing, whose def line precedes this chunk)
    # Returns the located file path, or False when the deb cannot be found.
    if 'File' in deb:
        assert os.path.isfile(deb['File'])
        return deb['File']
    of=deb['Filename']
    ob=os.path.basename(of)
    #try to build it from its own info
    if 'Basepath' in deb:
        f=os.path.join(deb['Basepath'],of)
        if os.path.exists(f):
            deb['File']=f
            if DEBUG > 1 : print 'Fish!',deb
            return f
    #try to build it from others info
    others=[a for a in others if id(a) != id(deb) ]
    for new in others:
        if 'File' in new:
            f=new['File']
            if os.path.basename(f) == ob:
                deb['File']=f
                if DEBUG > 1 : print 'Fish!',deb,new
                return f
        if 'Basepath' in new:
            for a in of,ob:
                f=os.path.join(new['Basepath'],a)
                if os.path.exists(f):
                    deb['File']=f
                    if DEBUG > 1 : print 'Fish!',deb,new
                    return f
    print 'Error, bad fishing for ', deb, others
    return False

def order_by_version(a,b):
    "cmp-style comparator: sort package dicts by Debian version."
    # lazy packages do not have versions; but are always ALT, so we sort them at bottom
    if 'Version' not in a:
        return -1
    if 'Version' not in b:
        return 1
    return VersionCompare( a['Version'] , b['Version'] )

# Main generation loop: for every (package, arch), optionally clean obsolete
# deltas, pick the newest CMDLINE deb as target, and create a delta from each
# eligible OLD version to it.
for pa,ar in info_by_pack_arch :
    info_pack=info_by_pack_arch[ (pa,ar) ]
    assert(info_pack)
    #delete deltas that are useless
    if CLEAN_DELTAS :
        if (pa,ar) in old_deltas_by_pack_arch :
            versions=[(o['Version']) for o in info_pack if (o['Label'] == 'CMDLINE')]
            # grace period: keep obsolete deltas younger than CLEAN_DELTAS_MTIME days
            timegrace=time.time() - CLEAN_DELTAS_MTIME * 24 * 3600
            #DEBUG print pa,ar,versions
            for f_d,o_d,n_d in old_deltas_by_pack_arch[ (pa,ar) ] :
                if n_d not in versions :
                    if os.path.exists(f_d):
                        if os.stat(f_d)[ST_MTIME] < timegrace:
                            if VERBOSE: print ' Removing: ',f_d
                            if ACT: os.unlink(f_d)
                        elif VERBOSE > 1 : print ' Obsolete but still young, graced: ',f_d
                    else:
                        print 'bellandata',f_d
            del versions
        elif DEBUG > 1:
            print 'No deltas where found for: ',pa,ar
    if N_DELTAS != None and 0 == N_DELTAS:
        #debdeltas was invoked just to clean the archive
        continue
    if all([('CMDLINE' != a['Label']) for a in info_pack ]):
        #this happens a lot, e.g. when we are scanning non-free/Packages.gz,
        # all free packages in the trash are to be ignored
        if DEBUG > 2 or VERBOSE > 3 : print ' No cmdline for:',pa,ar
        continue
    #do we need the versions of --alt packages ? seems not
    # for a in info_pack:
    #   #scan all lazy packages
    #   if 'Version' not in a:
    #     assert a['Label'] == 'ALT'
    #     scan_deb_bydict(a)
    info_pack.sort(order_by_version)
    how_many= len(info_pack)
    if how_many <= 1 :
        if VERBOSE > 3 : print ' Only one version: ', pa,ar
        continue
    # walk down from the top until the newest CMDLINE-labelled version is found
    newest = how_many -1
    while newest >= 0 :
        new=info_pack[newest]
        if new['Label'] != 'CMDLINE' :
            if VERBOSE > 3 : print ' Newest version deb was not in cmdline, skip down one: ', new
        else:
            break
        newest -= 1
    if newest <= 0 :
        if VERBOSE > 3 : print ' No older versions: ', new
        continue
    if not go_fishing(new,[]):
        print 'Error, cannot locate new file: ',new
        continue
    if VERBOSE > 2 :
        print ' Package: ',repr(pa),repr(ar),' Versions:',[ (o.get('Version'),o['Label'],o.get('Packages'),o.get('File')) for o in info_pack]
    newdebsize=os.path.getsize(new['File'])
    #very small packages cannot be effectively delta-ed
    if newdebsize <= MIN_DEB_SIZE :
        if VERBOSE > 1 : print ' Skip , too small: ', new['File']
        continue
    # iterate over all older versions, newest first
    oldn = newest
    generated=0
    seen_versions=[]
    while (oldn>0) :
        oldn -= 1
        old=info_pack[oldn]
        if old['Label'] != 'OLD':
            if VERBOSE > 2 : print ' Not old, skip:',old
            continue
        if old['Version'] == new['Version'] :
            if VERBOSE > 3 and old != new : print ' Identical versions: ', old, new
            continue
        assert( old['Package'] == pa and pa == new['Package'] )
        deltabasename = pa +'_'+ version_mangle(old['Version']) +\
            '_'+ version_mangle(new['Version']) +'_'+ar+'.debdelta'
        if 'Filename' in new:
            deltadirname=delta_dirname(os.path.dirname(new['Filename']),DIR)
        elif 'File' in new:
            deltadirname=delta_dirname(os.path.dirname(new['File']),DIR)
        else:
            assert(0)
        if deltadirname == '' : deltadirname = '.'
        if not os.path.exists(deltadirname):
            #FIXME this does not respect --no-act
            os.makedirs(deltadirname)
        delta=os.path.join(deltadirname,deltabasename)
        free=freespace(deltadirname)
        if free and (free < (newdebsize /2 + 2**15)) :
            if VERBOSE : print ' '+(_("Not enough disk space for storing `%s'.") % delta)
            continue
        generated+=1 #count also those already generated
        if N_DELTAS != None and (generated > N_DELTAS):
            continue
        if os.path.exists(delta):
            if VERBOSE > 1 : print ' Skip , already exists: ',delta
            continue
        if old['Package'] in seen_versions:
            if VERBOSE > 3 : print ' Skip , already considered: ',delta
            continue
        seen_versions.append(old['Package'])
        # stamp files from previous failed/oversized attempts suppress retries
        if os.path.exists(delta+'-too-big'):
            if VERBOSE > 1 : print ' Skip , tried and too big: ',delta
            continue
        if os.path.exists(delta+'-fails'):
            if VERBOSE > 1 : print ' Skip , tried and fails: ',delta
            continue
        if not go_fishing(old,info_pack):
            print 'Error, cannot locate old file: ',old
            continue
        if not ACT:
            print 'Would create:',delta
            continue
        if VERBOSE : print 'Creating:',delta
        # build into a temp name; only os.rename() into place on full success
        deltatmp=delta+'_tmp_'
        ret= None
        tdir=tempo()
        forensicfile=None
        if FORENSICDIR:
            if 'Filename' in new:
                forensicdirname=delta_dirname(os.path.dirname(new['Filename']),FORENSICDIR)
            elif 'File' in new:
                forensicdirname=delta_dirname(os.path.dirname(new['File']),FORENSICDIR)
            else:
                assert(0)
            if not os.path.exists(forensicdirname):
                #FIXME this does not respect --no-act
                os.makedirs(forensicdirname)
            forensicbasename = pa +'_'+ version_mangle(old['Version']) +'_'+ar+'.forensic'
            a=os.path.join(forensicdirname,forensicbasename)
            if not os.path.exists(a):
                forensicfile=open(a,'w')
            del a
        try:
            ret=do_delta_(old['File'],new['File'], deltatmp, TD=tdir, forensic_file=forensicfile)
            (deltatmp_, percent, elaps, info_delta, gpg_hashes) = ret
        except KeyboardInterrupt:
            if os.path.exists(deltatmp):
                os.unlink(deltatmp)
            rmtree(tdir)
            raise
        except DebDeltaError,s:
            if not VERBOSE : print _('Creating:'),delta
            print ' Creation of delta failed, reason: ',str(s)
            if os.path.exists(deltatmp) and DEBUG == 0:
                os.unlink(deltatmp)
            if not s.retriable :
                open(delta+'-fails','w').close()
            exitstatus=max(exitstatus, s.exitcode)
            ret = None
        except:
            exitstatus=4
            if os.path.exists(deltatmp):
                os.unlink(deltatmp)
            puke( " *** Error while creating delta "+delta)
            open(delta+'-fails','w').close()
            ret = None
        rmtree(tdir)
        if ret == None:
            continue
        info_delta.append('ServerID: '+HOSTID)
        info_delta.append('ServerBogomips: '+str(BOGOMIPS))
        if MAX_DELTA_PERCENT and percent > MAX_DELTA_PERCENT:
            os.unlink(deltatmp)
            if VERBOSE : print ' Warning, too big!'
            open(delta+'-too-big','w').close()
            continue
        if DO_TEST :
            ##patch test
            pret=None
            try:
                #test, ignoring gpg, that is added later on
                pret=do_patch(deltatmp,old['File'],None , info=info_delta, do_gpg=None)
            except DebDeltaError,s:
                print ' '+_('Error: testing of delta failed:')+' '+str(s)
                if os.path.exists(deltatmp) and DEBUG==0:
                    os.unlink(deltatmp)
                if not s.retriable :
                    open(delta+'-fails','w').close()
            except KeyboardInterrupt:
                if os.path.exists(deltatmp):
                    os.unlink(deltatmp)
                raise
            except Exception,s:
                exitstatus=max(exitstatus,4)
                puke(" *** Error while testing delta "+delta,s)
                if os.path.exists(deltatmp) and DEBUG==0:
                    os.unlink(deltatmp)
                open(delta+'-fails','w').close()
            if pret == None:
                continue
            (newdeb_,p_elaps)=pret
            info_delta.append('PatchTime: %.2f' % p_elaps)
            ##end patch test
        #complete the delta file prepending to it the info
        try:
            hashes_info=append_info(deltatmp,info_delta)
            # sign the delta
            if DO_GPG:
                gpg_hashes['info']=hashes_info
                sign_delta(deltatmp,gpg_hashes)
        except:
            puke('debdeltas')
            if os.path.exists(deltatmp):
                os.unlink(deltatmp)
        #eventually, put in its place
        os.rename(deltatmp,delta)
    #delete debs in --alt that are too old
    #if CLEAN_ALT:
    #  while oldn>=0:
    #    old=info_pack[l]
    #    if old['Label'] == 'ALT':
    #      f=old['File']
    #      if os.path.exists(f):
    #        if VERBOSE : print ' Removing alt deb: ',f
    #        if ACT: os.unlink(f)
    #    oldn-=1
if VERBOSE : print ' '+ ( _('Total running time: %.1f') % ( -start_time + time.time()))
return exitstatus
##################################################### delta-upgrade

class Predictor:
    """Estimates how long applying a delta will take on this machine.

    Persists two shelve databases under /var/lib/debdelta (root) or
    ~/.debdelta (user): per-package stats and per-server speed ratios,
    the latter keyed by 'ServerID:<id>' and updated as an exponential
    moving average of local-time / server-reported-time.
    """
    # shelve of per-package delta parameters (only opened when present or DEBUG)
    package_stats = None
    # shelve of per-server PatchSpeedRatio entries
    upgrade_stats = None

    def __init__(self):
        import shelve
        #self.shelve=shelve
        if os.getuid() == 0:
            basedir='/var/lib/debdelta'
        else:
            if not os.path.exists(os.path.expanduser('~/')):
                print '(home directory does not exists, Predictor disabled)'
                return
            basedir=os.path.expanduser('~/.debdelta')
        s=os.path.join(basedir,'upgrade.db')
        if not os.path.exists(basedir):
            print 'Creating:',basedir
            os.makedirs(basedir)
        self.upgrade_stats=shelve.open(s,flag='c')
        s=os.path.join(basedir,'packages_stats.db')
        if os.path.exists(s) or DEBUG > 1 :
            self.package_stats=shelve.open(s,flag='c')
        # default prediction strategy
        self.patch_time_predictor=self.patch_time_predictor_math

    ##### predictor for patching time
    def patch_time_predictor_simple(self,p):
        "Naive estimate: scale the server's PatchTime by the BogoMIPS ratio."
        if 'ServerBogomips' in p and 'PatchTime' in p:
            return (float(p[ 'PatchTime']) / BOGOMIPS * float(p['ServerBogomips']) )
        else:
            return None

    def update(self,p,t):
        """Record that applying the delta described by dict *p* took *t* seconds.

        Updates the per-package shelve (if open) and the per-server
        PatchSpeedRatio with weight 0.05 for the new observation.
        """
        #save delta info
        if self.package_stats != None :
            n=p['NEW/Package']
            d=copy(p)
            d['LocalDeltaTime']=t
            try:
                self.package_stats[n]=d
            except Exception,exc:
                print 'ERROR:Cannot update package_stats:',exc
        if self.upgrade_stats == None :
            return
        s='ServerID'
        if s not in p :
            return
        s=s+':'+p[s]
        if s not in self.upgrade_stats:
            # bootstrap the ratio from BogoMIPS when available
            r=1
            if 'ServerBogomips' in p :
                r= float(p['ServerBogomips']) / BOGOMIPS
            try:
                self.upgrade_stats[s]={ 'PatchSpeedRatio' : r }
            except Exception,exc:
                print 'ERROR:Cannot update upgrade_stats:',exc
        if 'PatchTime' not in p:
            return
        ut=float(p[ 'PatchTime'])
        r=self.upgrade_stats[s]['PatchSpeedRatio']
        # exponential moving average of (local time / server time)
        nr = 0.95 * r + 0.05 * ( t / ut )
        a=self.upgrade_stats[s]
        a['PatchSpeedRatio'] = nr
        try:
            self.upgrade_stats[s]=a
        except Exception,exc:
            print 'ERROR:Cannot update upgrade_stats:',exc
        if VERBOSE > 1 :
            print ' Upstream ',ut,'PatchSpeedRatio from ',r,' to ',nr
            print self.upgrade_stats[s]['PatchSpeedRatio']

    def patch_time_predictor_math(self,p):
        "Predicts time to patch."
        if 'PatchTime' not in p:
            return None
        ut=float(p[ 'PatchTime'])
        #
        s='ServerID'
        if s not in p :
            return self.patch_time_predictor_simple(p)
        s=s+':'+p[s]
        if s not in self.upgrade_stats:
            return self.patch_time_predictor_simple(p)
        r=self.upgrade_stats[s]['PatchSpeedRatio']
        return r * ut

def delta_uri_from_config(config, **dictio):
    """Return the delta_uri of the first sources.conf section matching *dictio*.

    A section matches when every keyword (Origin, Label, Site, Archive,
    PackageName, ...) that the section defines agrees with the given value.
    Raises DebDeltaError when a section lacks delta_uri; returns None (after
    an optional message) when nothing matches.
    """
    secs=config.sections()
    for s in secs:
        opt=config.options(s)
        if 'delta_uri' not in opt:
            raise DebDeltaError('sources.conf section '+repr(s)+'does not contain delta_uri',exitcode=3)
        match=True
        for a in dictio:
            #damn it, ConfigParser changes everything to lowercase !
            if ( a.lower() in opt ) and ( dictio[a] != config.get( s, a) ) :
                #print '!!',a, repr(dictio[a]) , ' != ',repr(config.get( s, a))
                match=False
                break
        if match:
            return config.get( s, 'delta_uri' )
    if VERBOSE :
        print ' '+(_("(sources.conf does not provide a server for `%s')") % repr(dictio['PackageName']))

def delta_upgrade_(args):
    # NOTE(review): this function continues past the end of this chunk.
    # a list of all error exitcodes that derive from downloading and applying
    mainexitcodes = [0]
    original_cwd = os.getcwd()
    import thread, threading, Queue, urllib2, fcntl, atexit, signal
    proxies=urllib2.getproxies()
    if VERBOSE and proxies:
        #note that this message is indented, I dont know what's best in translations
        print _(' Proxy settings detected in the environment; using "urllib2" for downloading; but\n this disables some features and is in general slower and buggier. 
See man page.')
    #for example, urllib2 transforms http response "401" into "404" , and "302" into "200"

    # read the delta-server configuration
    config=ConfigParser.SafeConfigParser()
    a=config.read(['/etc/debdelta/sources.conf', expanduser('~/.debdelta/sources.conf') ])
    # FIXME this does not work as documented in Python
    #if VERBOSE > 1 : print 'Read config files: ',repr(a)

    import warnings
    warnings.simplefilter("ignore",FutureWarning)

    if DO_PROGRESS:
        sys.stderr.write(string.ljust(_('Initializing APT cache...'),terminalcolumns)+'\r')
    elif VERBOSE:
        print _('Initializing APT cache...'),
    try:
        import apt_pkg
    except ImportError:
        raise DebDeltaError('python module "apt_pkg" is missing. Please install python-apt',True)
    try:
        import apt
    except ImportError:
        raise DebDeltaError('python module "apt" is missing. Please install a newer version of python-apt (newer than 0.6.12).',True)
    apt_pkg.init()

    #from apt import SizeToStr
    ## Return a string describing the size in a human-readable manner using
    ## SI prefix and base-10 units, e.g. '1k' for 1000, '1M' for 1000000, etc.
    def SizeToKibiStr(a):
        "this uses kibibytes (altough the program prints them as kilobytes)"
        if a < 8096 :
            return str(int(a))+'B'
        elif a < 8096*1024:
            return str(int(a/1024))+'kB'
        else:
            return str(int(a/1024/1024))+'MB'

    # mark everything upgradable in the APT cache; fall back to safe-upgrade
    if DO_PROGRESS:
        sys.stderr.write(string.ljust(_('Upgrading APT cache...'),terminalcolumns)+'\r')
    elif VERBOSE:
        print _('upgrading...'),
    cache=apt.Cache()
    try:
        cache.upgrade(True)
    except:
        if DO_PROGRESS:
            sys.stderr.write(string.ljust(_('Failed! Safe upgrading APT cache...'),terminalcolumns)+'\r')
        elif VERBOSE:
            print _('failed! trying safe-upgrade...')
        cache.upgrade(False)
    if DO_PROGRESS:
        sys.stderr.write(string.ljust(_('Upgraded APT cache.'),terminalcolumns)+'\r')
    elif VERBOSE:
        print _('done.')

    diversions=scan_diversions()

    # choose where recreated debs are stored
    if DIR == None:
        if os.getuid() == 0:
            DEB_DIR='/var/cache/apt/archives'
        else:
            DEB_DIR='/tmp/archives'
    else:
        DEB_DIR=DIR
    if not os.path.exists(DEB_DIR):
        os.mkdir(DEB_DIR)
    if not os.path.exists(DEB_DIR+'/partial'):
        os.mkdir(DEB_DIR+'/partial')

    # lock the archive dir the same way APT does, to avoid racing with it
    try:
        ##APT does (according to strace)
        #open("/var/cache/apt/archives/lock", O_RDWR|O_CREAT|O_TRUNC, 0640) = 17
        #fcntl64(17, F_SETFD, FD_CLOEXEC) = 0
        #fcntl64(17, F_SETLK, {type=F_WRLCK, whence=SEEK_SET, start=0, len=0}) = 0
        ##so
        a=os.open(DEB_DIR+'/lock', os.O_RDWR | os.O_TRUNC | os.O_CREAT, 0640)
        fcntl.fcntl(a, fcntl.F_SETFD, fcntl.FD_CLOEXEC)
        # synopsis lockf( fd, operation, [length, [start, [whence]]])
        fcntl.lockf(a, fcntl.LOCK_EX | fcntl.LOCK_NB, 0,0,0)
    except IOError, s:
        from errno import EAGAIN
        if s.errno == EAGAIN :
            a=' already locked!'
        else:
            a=str(s)
        if DEB_DIR == '/var/cache/apt/archives' :
            a=a+' (is APT running?)'
        raise DebDeltaError('could not lock dir: '+DEB_DIR+' '+a, retriable=True)

    if VERBOSE or DEB_DIR != '/var/cache/apt/archives' :
        print _('Recreated debs are saved in the directory %s') % DEB_DIR

    #these are the packages that do not have a delta
    no_delta = []

    total_time = -time.time()

    ##### predictor for patching time
    if DO_PREDICTOR:
        predictor = Predictor()

    #this is a dictonary (key is package name) of parameters of deltas
    #(to add some math in the future)
    params_of_delta={}

    patching_queue=Queue.Queue()
    thread_returns={}

    ######################## thread_do_patch
    def thread_do_patch(que, no_delta, returns, exitcodes, forensics):
        """Worker thread: apply queued deltas to rebuild debs.

        Consumes (name, delta, newdeb, deb_uri) tuples from *que* until a
        None sentinel arrives; on failure, falls back to queueing the full
        deb download (policy 'e') and records the exit code.
        """
        if VERBOSE > 1 : print ' Patching thread started. '
        debs_size=0
        debs_time=0
        while 1:
            a = que.get()
            if a == None:
                break   # sentinel: no more deltas
            (name, delta , newdeb, deb_uri) = a
            debs_time -= time.time()
            TD=tempo()
            if not ACT:
                print 'Would create: ',newdeb,' '
            else:
                if VERBOSE >= 2 : print ' Now patching for: ',name
                try:
                    #start_time=time.time()
                    returns['patchname']=os.path.basename(newdeb)
                    ret=do_patch_(delta, '/', newdeb , TD, returns, diversions=diversions, do_progress=False)
                    del returns['patchname']
                    l = os.path.getsize(newdeb)
                    #a=time.time() - start_time
                    a=ret[1]
                    #dear translator, please align this line with the line saying 'Downloaded, ...'
                    msg=_("Created, time %(time)5.2fsec, speed %(speed)4s/sec, %(name)s")
                    msgdata={'time' : a, 'speed' : SizeToKibiStr(l / (a+0.001)), 'name' : os.path.basename(newdeb)}
                    if DO_PROGRESS:
                        sys.stderr.write(string.ljust(msg % msgdata, terminalcolumns)+'\n')
                    else:
                        print msg % msgdata
                except KeyboardInterrupt:
                    thread.interrupt_main()
                    rmtree(TD)
                    return
                except DebDeltaError,s:
                    puke(' Error: applying of delta for '+name+' failed: ',s)
                    if 'e' in DEB_POLICY:
                        no_delta.append( (deb_uri, newdeb) )
                    elif VERBOSE > 1 : print ' No deb-policy "e", no download of ',deb_uri
                    forensics.append(s.logs)
                    exitcodes.append(s.exitcode)
                except:
                    if puke == None: return   # interpreter may be shutting down
                    puke( " *** Error while applying delta for "+name+": ")
                    if 'e' in DEB_POLICY:
                        no_delta.append( (deb_uri, newdeb) )
                    elif VERBOSE > 1 : print ' No deb-policy "e", no download of ',deb_uri
                    exitcodes.append(4)
                else:
                    # success: feed the predictor and delete the applied delta
                    if name in params_of_delta :
                        p= params_of_delta[name]
                        name,elaps=ret
                        if DO_PREDICTOR:
                            predictor.update(p,elaps)
                            if VERBOSE > 1 :
                                t=predictor.patch_time_predictor(p)
                                if t: print ' (Predicted %.3f sec )' % t
                    debs_size += os.path.getsize(newdeb)
                    if os.path.exists(delta):
                        os.unlink(delta)
            rmtree(TD)
            debs_time += time.time()
        returns['debs_size']=debs_size
        returns['debs_time']=debs_time
        if VERBOSE > 1 : print ' Patching thread ended , bye bye. '

    #####################################
    def progress_string(statusdb):
        "Compose the one-line download/patch progress display, fit to terminal width."
        download=''
        if 'downloaduri' in statusdb:
            download="D %2d%% (%4s/s) %s " % \
                (statusdb.get('downloadprogress',-1), statusdb.get('downloadspeed','-'), statusdb['downloaduri'])
        patch=''
        if 'patchname' in statusdb:
            patch='P %2d%% %s'% (statusdb.get('patchprogress',-1), statusdb['patchname'])
        if terminalcolumns == None:
            return download+' ; '+patch
        if not patch:
            return download[:terminalcolumns]
        if not download:
            return patch[:terminalcolumns]
        # both present: shrink each proportionally to fit the terminal
        ld=len(download)
        lp=len(patch)
        b=ld + lp
        if b < terminalcolumns - 3 :
            return download+' ; '+patch
        a=float(terminalcolumns-5)/float(b)
        ld=int(ld*a)
        lp=int(lp*a)
        return download[:ld] + ' ; ' + patch[:lp] + '..'

    #########################################
    import socket, httplib
    from urlparse import urlparse, urlunparse

    #################### manage connections
    #keeps a cache of all connections, by URL
    # value None marks a host as permanently failed (see conn_close fatal=True)
    http_conns={}

    def conn_by_url(url):
        "Return (creating if needed) the cached HTTPConnection for url's host."
        url=urlparse(url)[1]
        if url not in http_conns:
            if VERBOSE > 1 : print ' Opening connection to: ',url
            http_conns[url] = httplib.HTTPConnection(url, timeout=15)
        return http_conns[url]

    def conn_close(url,fatal=False):
        "Close the cached connection for url's host; fatal=True blacklists it."
        url=urlparse(url)[1]
        conn=http_conns.get(url)
        if fatal:
            http_conns[url] = None
        else:
            del http_conns[url]
        if conn != None :
            if VERBOSE > 1 : print ' Closing connection to: ',url
            conn.close()

    ####
    def _connect(uri, headers):
        "connects for a GET ; returns (filetype, statuscode, servermessage, getheaders)"
        uri_p=urlparse(uri)
        if uri_p.scheme == 'http' and not proxies:
            #use persistent http connections
            conn=conn_by_url(uri)
            if conn == None :
                return None, None, None, None
            try:
                conn.request("GET", urllib2.quote(uri_p[2]),headers=headers)
                r = conn.getresponse()
                return r, r.status, r.reason, r.msg
            except (httplib.HTTPException, socket.error),e:
                # one retry on a fresh connection before declaring the host dead
                if VERBOSE : puke( ' Connection error (retrying): ',uri_p[1])
                conn_close(uri)
                try:
                    conn=conn_by_url(uri)
                    conn.request("GET", urllib2.quote(uri_p[2]),headers=headers)
                    r = conn.getresponse()
                    return r, r.status, r.reason, r.msg
                except (httplib.HTTPException, socket.error),e:
                    puke( 'Connection error (fatal): ',uri_p[1])
                    mainexitcodes.append(1)
                    try:
                        conn_close(uri,fatal=True)
                    except:
                        pass
                    mainexitcodes.append(1)
                    return e, None, None, None
        else:
            #use urllib2
            try:
                # NOTE(review): `url` is only assigned in the http branch; a
                # non-http uri here would hit a NameError — verify callers.
                if uri_p.scheme == 'http':
                    a=[copy(z) for z in uri_p]
                    a[2]=urllib2.quote(uri_p[2])
                    url=urlunparse(a)
                req = urllib2.Request(url, headers=headers)
                r = urllib2.urlopen(req)
                #print r.info(),dir(r),r.code
                return r, getattr(r,'code',None), getattr(r,'msg','(no message)'), r.info()
            except urllib2.HTTPError,e:
                return e.code, None, None, None
            except (httplib.HTTPException,socket.error,urllib2.URLError),e:
                puke( 'Connection error (fatal)',uri)
                mainexitcodes.append(1)
                return e, None, None, None

    ################################################# various HTTP facilities
    def _parse_ContentRange(s):
        "Parse 'bytes FIRST-LAST/TOTAL'; return (first, last, total) or None."
        #bytes 0-1023/25328
        if not s or s[:6] != "bytes " :
            print "Malformed Content-Range",s
            return
        a=s[6:].split('/')
        if len(a) != 2 :
            print "Malformed Content-Range",s
            return
        b=a[0].split('-')
        if len(b) != 2 :
            print "Malformed Content-Range",s
            return
        return int(b[0]),int(b[1]),int(a[1])

    ###################################### test_uri
    def test_uri(uri):
        "HEAD request; return the HTTP status code (or None if host blacklisted)."
        conn=conn_by_url(uri)
        if conn == None:
            return None
        uri_p=urlparse(uri)
        assert(uri_p[0] == 'http')
        conn.request("HEAD", urllib2.quote(uri_p[2]),headers=HTTP_USER_AGENT)
        r = conn.getresponse()
        r.read()
        r.close()
        return r.status

    ###################################### download_10k_uri
    def download_10k_uri(uri,outname):
        "in case of connection error, returns the (error, None, None, None) ; otherwise returns (status,len,outname,complete)"
        #download the first 10kB, enough to read the delta header/parameters
        uri_p=urlparse(uri)
        assert(uri_p[0] == 'http')
        re=copy(HTTP_USER_AGENT)
        re["Range"] = "bytes=0-10239"
        complete=False
        r, status, msg, responseheaders=_connect(uri, re)
        if not hasattr(r,'read') and responseheaders==None:
            return r, None, None, None
        if status == 206:
            # partial content honored: store under .../partial/
            outnametemp=os.path.join(os.path.dirname(outname),'partial',os.path.basename(outname))
            try:
                l = _parse_ContentRange(responseheaders['Content-Range'])[2]
            except (KeyError, ValueError):
                l = None
        elif status == 200:
            # server ignored the Range header: we got the whole file
            outnametemp=outname
            complete=True
            try:
                l=long(responseheaders.get('Content-Length'))
            except:
                l=None
        else:
            #FIXME how do we deal with a FTP mirror of deltas ?
            r.read()
            r.close()
            return status, None, None, None
        s=r.read()
        r.close()
        #(maybe I did not understand the status 206 ?)
        if not complete and l != None and len(s) >= l:
            outnametemp=outname
            complete=True
        if os.path.exists(outnametemp) and os.path.getsize(outnametemp) >= len(s) :
            #do not truncate preexisting file
            complete=(os.path.getsize(outnametemp) >= l)
            return status, outnametemp, l, complete
        out=open(outnametemp,'w')
        out.write(s)
        out.close()
        return status, outnametemp, l, complete

    ###################################### download_uri
    def download_uri(uri, outname, conn_time, len_downloaded, statusdb):
        """Download *uri* to *outname*, resuming a partial file when possible.

        Returns (accumulated_time, accumulated_bytes) on success, None on
        failure; progress is reported through *statusdb*.
        """
        outnametemp=os.path.join(os.path.dirname(outname),'partial',os.path.basename(outname))
        re=copy(HTTP_USER_AGENT)
        #content range
        l=None
        if os.path.exists(outnametemp):
            #shamelessly adapted from APT, methods/http.cc
            s=os.stat(outnametemp)
            l=s[ST_SIZE]
            #t=s[ST_MTIME]
            ### unfortunately these do not yet work
            #thank god for http://docs.python.org/lib/module-time.html
            #actually APT does
            #t=time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(t))
            ##re["If-Range"] = time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(t))
            ####re["If-Range"] = time.strftime("%a, %d %b %Y %H:%M:%S +0000", time.gmtime(t))
            # re-request the last byte we already have, to detect server support
            re["Range"] = "bytes=%li-" % ( (long(l)-1) )
        #start downloading
        start_time=time.time()
        r, status, message, responseheaders = _connect(uri, re)
        if not hasattr(r,'read') and responseheaders==None:
            return
        if not ( status == None or status == 200 or ( status == 206 and l != None ) ):
            if VERBOSE :
                print 'Connection problem, status:'+str(status)+' msg:'+str(message)+' uri:'+str(uri)
            r.read()
            r.close()
            return
        #print 'ooK Content-Range', r.getheader('Content-Range')
        #HACK
        if l and status == 200 :
            print ' Hmmm... our HTTP range request failed, ',repr(re),status,message
        if status == 200 :
            out=open(outnametemp,'w')
            try:
                total_len = long(responseheaders['Content-Length'])
            except (KeyError,ValueError):
                total_len=None
        elif status == 206 :
            #APT does scanf of "bytes %lu-%*u/%lu",&StartPos,&Size
            #first-byte-pos "-" last-byte-pos "/" instance-length
            out=open(outnametemp,'a')
            try:
                a,b,total_len =_parse_ContentRange(responseheaders['Content-Range'])
            except (KeyError,ValueError),e:
                sys.stderr.write('! problem, http response [206], Content Range %s , error %s , uri %s\n' %\
                    (responseheaders.get('Content-Range'),e,uri))
                return
            out.seek(a)
            out.truncate()
        else:
            out=open(outnametemp,'w')
            try:
                total_len=long(responseheaders.get('Content-length'))
            except ValueError:
                total_len=None
        free=freespace(os.path.dirname(outname))
        if total_len != None and free and (free + 2**14 ) < total_len :
            print _('Not enough disk space to download:')+' '+os.path.basename(uri)
            r.read()
            r.close()
            mainexitcodes.append(1)
            return
        # stream the body in 1kB chunks, publishing progress/speed
        j=0
        s=r.read(1024)
        while s and (total_len == None or out.tell() < total_len ):
            j+=len(s)
            out.write(s)
            if total_len:
                statusdb['downloadprogress']=99.9 * out.tell() / total_len
            a=time.time() + conn_time - start_time
            if a > 0.5 :
                statusdb['downloadspeed']=SizeToKibiStr(float(j+len_downloaded)/a)
            s=r.read(1024)
        out.close()
        r.close()
        #end of download
        a=time.time() - start_time
        #if total_len == None:
        #  total_len = os.path.getsize(outnametemp)
        #dear translator, please align this line with the line saying 'Created, ...'
        msg=_("Downloaded, time %(time)5.2fsec, speed %(speed)4s/sec, %(name)s")
        msgdata={'time' : a , 'speed' : SizeToKibiStr(j / (a+0.001)) , 'name' : os.path.basename(uri)}
        if DO_PROGRESS:
            sys.stderr.write(string.ljust(msg % msgdata, terminalcolumns)+'\n')
        else:
            print msg % msgdata
        os.rename(outnametemp,outname)
        #FIXME this is incorrect by 1024 bytes
        return conn_time + a, (j+len_downloaded)

    ###################################### end of HTTP stuff

    ################### start patching thread
    forensics=[]
    patching_thread=threading.Thread( target=thread_do_patch ,
        args=(patching_queue, no_delta, thread_returns, mainexitcodes, forensics) )
    patching_thread.daemon=True
    patching_thread.start()

    ################### first merry-go-round
    deltas_down_size=0
    deltas_down_time=0
    #this is a list of tuples of .....
    available_deltas=[]
    not_available_deltas=[]

    # python-apt renamed these attributes across versions; support both APIs
    if hasattr(apt.package.Package,'is_installed'):
        is_installed=lambda p : p.is_installed
    elif hasattr(apt.package.Package,'isInstalled'):
        is_installed=lambda p : p.isInstalled
    else: assert 0
    if hasattr(apt.package.Package,'marked_upgrade'):
        marked_upgrade=lambda p : p.marked_upgrade
    elif hasattr(apt.package.Package,'markedUpgrade'):
        marked_upgrade=lambda p : p.markedUpgrade
    else: assert 0

    progress_count=0
    ## first merry-go-round, use package cache to fill available_deltas, download 10kB of each delta
    for p in cache :
        #print progress
        if DO_PROGRESS :
            progress_count+=1
            if 0 == (progress_count & 63):
                sys.stderr.write(string.ljust('%2.1f%%' % (float(progress_count) * 100.0 / len(cache)), terminalcolumns) + '\r')
        if is_installed(p) and marked_upgrade(p) :
            if args and p.name not in args:
                continue
            #thanks a lot to Julian Andres Klode
            candidate=p.candidate
            origin = p.candidate.origins[0]
            arch=candidate.architecture
            deb_uri=candidate.uri
            installed_version=p.installed.version
            candidate_version=p.candidate.version
            deb_path=string.split(deb_uri,'/')
            try:
                thepoolindex=deb_path.index('pool')
            except ValueError:
                sys.stderr.write('! Package "%s" (version %s) does not have "pool" in the uri %s \n' % (p.name, candidate_version, deb_uri))
                continue
            deb_path=string.join(deb_path[(thepoolindex):],'/')
            #try all possible variants of the filename
            newdebs=[p.name+'_'+candidate_version+'_'+arch+'.deb', os.path.basename(deb_uri)]
            if ':' in candidate_version:
                # epoch colon may be stripped or percent-encoded in filenames
                a=candidate_version.split(':')
                newdebs.append(p.name+'_'+a[1]+'_'+arch+'.deb')
                newdebs.append(p.name+'_'+a[0]+'%3A'+a[1]+'_'+arch+'.deb')
                newdebs.append(p.name+'_'+a[0]+'%3a'+a[1]+'_'+arch+'.deb')
            for newdeb in newdebs:
                if os.path.exists(DEB_DIR+'/'+newdeb) or \
                    os.path.exists('/var/cache/apt/archives/'+newdeb):
                    if VERBOSE > 1 : print ' Already downloaded: ',p.name,candidate_version
                    newdeb=None
                    break
            if newdeb == None:
                continue
            newdeb = DEB_DIR+'/'+newdebs[-1]
            if VERBOSE > 1 :
                print ' Looking for a delta for %s from %s to %s ' % ( p.name, installed_version, candidate_version )
            delta_uri_base=delta_uri_from_config(config,
                Origin=origin.origin,
                Label=origin.label,
                Site=origin.site,
                Archive=origin.archive,
                PackageName=p.name)
            if delta_uri_base == None:
                if 's' in DEB_POLICY:
                    no_delta.append( (deb_uri, newdeb) )
                continue
            a=urlparse(delta_uri_base)
            assert(a[0] == 'http')
            #delta name
            delta_name=p.name.split(':')[0]+'_'+version_mangle(installed_version)+\
                '_'+ version_mangle(candidate_version)+'_'+\
                arch+'.debdelta'
            uri=delta_uri_base+'/'+os.path.dirname(deb_path)+'/'+delta_name
            #download first part of delta
            abs_delta_name= DEB_DIR+'/'+delta_name
            #maybe it is already here
            if os.path.exists(abs_delta_name):
                a = abs_delta_name
            else:
                a = DEB_DIR+'/partial/'+delta_name
                if not os.path.exists(a):
                    a = None
            if a:
                l=os.path.getsize(a)
                if VERBOSE > 1 : print ' Already here: ',abs_delta_name
                s=get_info_fast(a)
                if s:
                    params_of_delta[p.name]=info_2_db(s)
                available_deltas.append( (l, p.name, uri, abs_delta_name , newdeb, deb_uri, a, True) )
                continue
            #if not, download its first part
            if DO_PROGRESS:
                sys.stderr.write(string.ljust('%2.1f%% ' % (float(progress_count) * 100.0 / len(cache)) + \
                    _('Downloading head of %s...') % p.name, terminalcolumns) + '\r')
            deltas_down_time-=time.time()
            status, tempname, l, complete = download_10k_uri(uri,abs_delta_name)
            deltas_down_time+=time.time()
            #some strange error in remote server?
            #FIXME this does not support ftp delta repositories
            if status != 200 and status != 206 and status != 404:
                print 'Delta is not downloadable (%s %s):%s' %\
                    (status,httplib.responses.get(status), uri)
                continue
            if status == 404:
                not_available_deltas.append(p.name)
                # a '-too-big' stamp on the server explains the missing delta
                if uri[:7] == 'http://' and not proxies:
                    # FIXME support ftp or proxies
                    bigrstatus = test_uri(uri+'-too-big')
                else:
                    bigrstatus = None
                if bigrstatus == 200 :
                    print _('Delta is too big:')+' '+delta_name
                    if 'b' in DEB_POLICY:
                        no_delta.append( (deb_uri, newdeb) )
                    elif VERBOSE > 1 : print ' No deb-policy "b", no download of ',deb_uri
                else:
                    if DEBUG and VERBOSE:
                        print _('Delta is not present:')+' '+uri
                    else:
                        print _('Delta is not present:')+' '+delta_name
                    if 'u' in DEB_POLICY:
                        no_delta.append( (deb_uri, newdeb) )
                    elif VERBOSE > 1 : print ' No deb-policy "u", no download of ',deb_uri
                continue
            if VERBOSE > 1:
                print 'Delta is present:', delta_name, tempname,' '
            elif DO_PROGRESS:
                sys.stderr.write(string.ljust('%2.1f%%' % (float(progress_count) * 100.0 / len(cache)) + \
                    _('Downloaded head of %s.') % p.name, terminalcolumns) + '\r')
            if os.path.isfile(tempname):
                deltas_down_size+=os.path.getsize(tempname)
            #parse file and save info
            try:
                s=get_info_fast(tempname)
            except DebDeltaError,e:
                sys.stderr.write("!!"+str(e)+'\n')
                sys.stderr.write("!! (renamed to "+tempname+'~~NOT~A~DELTA~~ )\n')
                os.rename(tempname,tempname+'~~NOT~A~DELTA~~')
                if proxies:
                    sys.stderr.write("!!maybe a proxy is returning an error page??\n")
                else:
                    sys.stderr.write("!!damaged delta??\n")
                continue
            if s:
                params_of_delta[p.name]=info_2_db(s)
                s=patch_check_tmp_space(params_of_delta[p.name], '/')
                if s != True:
                    print p.name,' : sorry '+s
                    #neither download deb nor delta..
                    #the user may wish to free space and retry
                    continue
            #FIXME may check that parameters are conformant to what we expect
            if complete:
                patching_queue.put( (p.name, abs_delta_name ,newdeb, deb_uri ) )
            else:
                available_deltas.append( (l, p.name, uri, abs_delta_name , newdeb, deb_uri, tempname, complete) )
    ## end of first merry-go-round

    available_deltas.sort()
    if DEBUG or VERBOSE:
        if DO_PROGRESS:
            sys.stderr.write(' ' * (terminalcolumns) +'\r')
        print ' '+ _('Deltas: %(present)d present and %(absent)d not,') % \
            {'present' : len(available_deltas), 'absent' : len(not_available_deltas)}
        print ' '+ _('downloaded so far: time %(time).2fsec, size %(size)s, speed %(speed)4s/sec.') % \
            {'time' : deltas_down_time, 'size' : SizeToKibiStr(deltas_down_size),
             'speed' : SizeToKibiStr(deltas_down_size/float(deltas_down_time+0.001))}
        if available_deltas:
            print ' '+_('Need to get %s of deltas.') % SizeToKibiStr(sum([a[0] for a in available_deltas]))

    ###################### start progress thread
    def print_progress(common_db):
        "Background thread: repaint the progress line until 'STOP' appears."
        while sys and 'STOP' not in common_db:
            sys.stderr.write(progress_string(common_db)+'\r')
            time.sleep(0.2)

    if DO_PROGRESS and terminalcolumns > 4:
        progress_thread=threading.Thread(target=print_progress, args=( thread_returns, ) )
        progress_thread.daemon=True
        progress_thread.start()
    else:
        progress_thread=None

    ################ second merry-go-round, download rest of available deltas , queue them
    for delta_len, name, uri, abs_delta_name , newdeb, deb_uri, tempname, complete in available_deltas :
        ## this seems to create problems....
        #if not os.path.exists(abs_delta_name) and os.path.exists(tempname) and os.path.getsize(tempname) == delta_len:
        #  print 'Just rename:',name #this actually should never happen, but , who knows...
        #  os.rename(tempname,abs_delta_name)
        #  tempname=abs_delta_name
        if name in params_of_delta:
            s=patch_check_tmp_space(params_of_delta[name], '/')
            if s != True:
                print name,' : sorry, '+s
                #argh, we ran out of space in meantime
                continue
        if not os.path.exists(abs_delta_name):
            thread_returns['downloaduri']=os.path.basename(uri)
            r=download_uri(uri, abs_delta_name, deltas_down_time, deltas_down_size, thread_returns)
            del thread_returns['downloaduri']
            if r == None or isinstance(r, httplib.HTTPException) :
                if VERBOSE :
                    print ' '+_('You may wish to rerun, to get also:')+' '+uri
                continue
            else:
                deltas_down_time = r[0]
                deltas_down_size = r[1]
        #queue to apply delta
        if os.path.exists(abs_delta_name):
            #append to queue
            patching_queue.put( (name, abs_delta_name ,newdeb, deb_uri ) )
        else:
            if 'u' in DEB_POLICY:
                no_delta.append( (deb_uri, newdeb) )
            elif VERBOSE > 1 : print ' No deb-policy "u", no download of ',deb_uri
    ## end of second merry-go-round

    #terminate queue
    patching_queue.put(None)
    #do something useful in the meantime
    debs_down_size=0
    debs_down_time=0
    if patching_thread.isAlive() and no_delta and VERBOSE > 1 :
        print ' Downloading deltas done, downloading debs while waiting for patching thread.'
while patching_thread.isAlive() or ('a' in DEB_POLICY and no_delta): if no_delta: uri, newdeb = no_delta.pop() thread_returns['downloaduri']=os.path.basename(uri) r=download_uri(uri, newdeb, debs_down_time, debs_down_size, thread_returns) del thread_returns['downloaduri'] if r == None or isinstance(r, httplib.HTTPException) : if VERBOSE : print ' You may wish to rerun, to get also: ',uri continue if r: debs_down_time = r[0] debs_down_size = r[1] if not no_delta : time.sleep(0.1) for i in http_conns: if http_conns[i] != None : http_conns[i].close() while patching_thread.isAlive(): time.sleep(0.1) #terminate progress report thread_returns['STOP']=True while progress_thread != None and progress_thread.isAlive(): time.sleep(0.1) if DO_PROGRESS: sys.stderr.write(' ' * terminalcolumns +'\r') total_time += time.time() print _('Delta-upgrade statistics:') msg=_("size %(size)s time %(time)dsec speed %(speed)s/sec") if VERBOSE: if deltas_down_time : a=float(deltas_down_size) t=deltas_down_time print ' ' + _('downloaded deltas, ') + msg % \ {'size' : SizeToKibiStr(a), 'time' : int(t), 'speed' : SizeToKibiStr(a / t )} if thread_returns['debs_time'] : a=float(thread_returns['debs_size']) t=thread_returns['debs_time'] print ' ' + _('patching to debs, ') + msg % \ {'size' : SizeToKibiStr(a), 'time' : int(t), 'speed' : SizeToKibiStr(a / t )} if debs_down_time : a=float(debs_down_size) t=debs_down_time print ' ' + _('downloaded debs, ') + msg % \ {'size' : SizeToKibiStr(a), 'time' : int(t), 'speed' : SizeToKibiStr(a / t )} if total_time: a=float(debs_down_size + thread_returns['debs_size']) t=total_time print ' ' + _('total resulting debs, size %(size)s time %(time)dsec virtual speed %(speed)s/sec') % \ {'size' : SizeToKibiStr(a), 'time' : int(t), 'speed' : SizeToKibiStr(a / t )} if forensics: forensic_send(forensics) return max(mainexitcodes) ################################################# main program, do stuff def act(): "fake function that marks where the action starts" pass 
# ----- command dispatch ---------------------------------------------------
# `action` and `argv` were set during option parsing earlier in the file;
# each branch maps one command-line personality of this multi-call script
# (debpatch / debdelta / debdeltas / debdelta-upgrade / debpatch-url) to its
# worker function, and converts failures into exit codes:
#   3 = bad usage, 4 = unexpected exception, 5 = keyboard interrupt,
#   DebDeltaError carries its own exitcode.
if action == 'patch':
    if INFO :
        # "debpatch --info DELTA": just print the delta's info records.
        if len(argv) > 1 and VERBOSE : sys.stderr.write(' (printing info - extra arguments are ignored)\n')
        elif len(argv) == 0 :
            sys.stderr.write(_('Need a filename; try --help.')+'\n')
            raise SystemExit(3)
        try:
            delta=abspath(argv[0])
            check_is_delta(delta)
            info=get_info(delta)
            for s in info:
                if s: print ' info: ',s
        except KeyboardInterrupt:
            puke('debpatch exited by keyboard interrupt')
            raise SystemExit(5)
        except DebDeltaError,s:
            puke('debpatch',s)
            raise SystemExit(s.exitcode)
        except Exception,s:
            puke("debpatch",s)
            raise SystemExit(4)
        raise SystemExit(0)
    #really patch
    # args are: delta, fromfile, tofile
    if len(argv) != 3 :
        sys.stderr.write(_('Need 3 filenames; try --help.')+'\n')
        raise SystemExit(3)
    newdeb=abspath(argv[2])
    # a /dev/null target means "apply and verify, but keep no output deb"
    if newdeb == '/dev/null':
        newdeb = None
    try:
        do_patch(abspath(argv[0]), abspath(argv[1]), newdeb)
    except KeyboardInterrupt:
        puke('debpatch exited by keyboard interrupt')
        raise SystemExit(5)
    except DebDeltaError,s:
        puke('debpatch',s)
        # ship forensic logs so the delta author can debug faulty deltas
        if s.logs: forensic_send([s.logs])
        raise SystemExit(s.exitcode)
    except Exception,s:
        puke('debpatch',s)
        raise SystemExit(4)
    raise SystemExit(0)
elif action == 'delta' :
    # "debdelta FROMFILE TOFILE DELTA": compute one delta.
    if len(argv) != 3 :
        sys.stderr.write(_('Need 3 filenames; try --help.')+'\n')
        raise SystemExit(3)
    delta=abspath(argv[2])
    try:
        r = do_delta(abspath(argv[0]), abspath(argv[1]), delta)
    except KeyboardInterrupt:
        puke('debdelta exited by keyboard interrupt')
        raise SystemExit(5)
    except DebDeltaError,s:
        puke('debdelta',s)
        raise SystemExit(s.exitcode)
    except Exception, s:
        puke('debdelta',s)
        raise SystemExit(4)
    raise SystemExit(0)
elif action == 'deltas' :
    # "debdeltas": bulk delta generation over debs/dirs/Packages files.
    for v in argv:
        if not (os.path.isfile(v) or os.path.isdir(v)) :
            sys.stderr.write(_("Error: argument is not a directory or a regular file:")+' '+v)
            raise SystemExit(3)
    try:
        exitcode=do_deltas(argv)
    except KeyboardInterrupt:
        puke('debdeltas exited by keyboard interrupt')
        raise SystemExit(5)
    except DebDeltaError,s:
        puke('debdeltas',s)
        raise SystemExit(s.exitcode)
    except Exception,s:
        puke('debdeltas',s)
        raise SystemExit(4)
    raise SystemExit(exitcode)
elif action == 'delta-upgrade':
    # "debdelta-upgrade": download deltas and rebuild debs for apt upgrade.
    import warnings
    warnings.simplefilter("ignore",FutureWarning)
    try:
        exitcode=delta_upgrade_(argv)
    except KeyboardInterrupt:
        puke('debdelta-upgrade exited due to keyboard interrupt')
        raise SystemExit(5)
    except DebDeltaError,s:
        puke('debdelta-upgrade',s)
        raise SystemExit(s.exitcode)
    except Exception, s:
        puke('debdelta-upgrade',s)
        raise SystemExit(4)
    raise SystemExit(exitcode)
elif action == 'patch-url':
    # "debpatch-url": print the URL of the delta that would upgrade each
    # named package, using the server map from sources.conf.
    config=ConfigParser.SafeConfigParser()
    config.read(['/etc/debdelta/sources.conf', expanduser('~/.debdelta/sources.conf') ])
    try:
        import apt_pkg
    except ImportError:
        print 'ERROR!!! python module "apt_pkg" is missing. Please install python-apt'
        raise SystemExit(1)
    try:
        import apt
    except ImportError:
        print 'ERROR!!! python module "apt" is missing. Please install a newer version of python-apt (newer than 0.6.12)'
        raise SystemExit(1)
    apt_pkg.init()
    cache=apt.Cache()
    cache.upgrade(True)
    for a in argv:
        print _('Lookup %s') % a
        p = cache[a]
        candidate=p.candidate
        origin = p.candidate.origins[0]
        arch=candidate.architecture
        if not candidate.uris :
            sys.stderr.write(_("Sorry, cannot find an URI to download the debian package of `%s'.") % a)
            continue
        deb_uri = candidate.uri
        installed_version=p.installed.version
        candidate_version=p.candidate.version
        # keep only the part of the deb URI from 'pool' onwards; the delta
        # repository mirrors the pool layout
        deb_path=string.split(deb_uri,'/')
        deb_path=string.join(deb_path[(deb_path.index('pool')):],'/')
        delta_uri_base=delta_uri_from_config(config, Origin=origin.origin, Label=origin.label, Site=origin.site, Archive=origin.archive, PackageName=p.name)
        if delta_uri_base == None:
            sys.stderr.write(_("Sorry, no source is available to upgrade `%s'.") % a)
            continue
        if installed_version == candidate_version:
            sys.stderr.write(_("Sorry, the package `%s' is already at its newest version.") % a)
            continue
        #delta name
        # name_oldver_newver_arch.debdelta, with ':' etc. mangled in versions
        delta_name=p.name+'_'+version_mangle(installed_version)+\
            '_'+ version_mangle(candidate_version)+'_'+\
            arch+'.debdelta'
        uri=delta_uri_base+'/'+os.path.dirname(deb_path)+'/'+delta_name
# (tail of the 'patch-url' loop and of its enclosing elif branch)
        print a+' : '+uri
    raise SystemExit(0)

##################################################### apt method
### still work in progress
# Experimental APT transport shim: when this script is installed under
# /usr/lib/apt/methods it sits between apt and the real http method
# (http.distrib), relaying the method protocol unchanged in both directions
# while logging all traffic to /tmp/log.
if os.path.dirname(sys.argv[0]) == '/usr/lib/apt/methods' :
    import os,sys, select, fcntl, apt, thread, threading, time
    apt_cache=apt.Cache()
    log=open('/tmp/log','a')
    log.write(' --- here we go\n')
    # hi/ho/he = stdin/stdout/stderr pipes of the spawned real http method
    ( hi, ho , he) = os.popen3('/usr/lib/apt/methods/http.distrib','b',2)
    # one counter shared by the three relay threads below; when it reaches 0
    # all pipes are closed and the main loop exits
    nthreads=3
    class cheat_apt_gen:
        # Tiny state machine that watches apt's messages for a "600 URI
        # Acquire" request, so the URI and target filename can be logged.
        def __init__(self):
            self.uri=None
            self.filename=None
            self.acquire=False
        def process(self,cmd):
            if self.uri:
                self.filename=cmd[10:-1]
                log.write(' download %s for %s\n' % (repr(self.uri),repr(self.filename)))
                self.uri=None
                self.filename=None
                self.acquire=False
                return cmd
            elif self.acquire:
                self.uri=cmd[5:-1]
                return cmd
            elif cmd[:3] == '600' :
                # NOTE(review): this branch falls through and returns None;
                # the only caller discards the return value, so it is
                # harmless, but the asymmetry looks unintentional.
                self.acquire=True
            else:
                return cmd
    def copyin():
        # Relay: real method's stdout -> our stdout (fd 1), one byte at a
        # time, logging each completed line.
        bufin=''
        while 1:
            #print ' o'
            s=os.read(ho.fileno(),1)
            bufin += s
            if log and bufin and (s == '' or s == '\n') :
                log.write( ' meth ' +repr(bufin)+'\n' )
                bufin=''
            if s == '':
                # EOF: wake the main thread and note one relay finished
                thread.interrupt_main( )
                global nthreads
                if nthreads: nthreads-=1
                #log.write( ' in closed \n' )
                #return
            os.write(1,s)
    def copyerr():
        # Relay: real method's stderr -> our stderr (fd 2).
        buferr=''
        while 1:
            s=os.read(he.fileno(),1)
            buferr += s
            if log and buferr and (s == '' or s == '\n') :
                log.write( ' err ' +repr(buferr)+'\n' )
                buferr=''
            if s == '':
                thread.interrupt_main( )
                global nthreads
                if nthreads: nthreads-=1
                log.write( ' err closed \n' )
                #return
            os.write(2,s)
    def copyout():
        # Relay: apt's commands (our stdin, fd 0) -> real method's stdin,
        # feeding each completed line through the cheat_apt_gen tracker.
        gen=cheat_apt_gen()
        bufout=''
        while 1:
            s=os.read(0,1)
            bufout += s
            if log and bufout and (s == '' or s == '\n') :
                log.write( ' apt ' +repr(bufout)+'\n' )
                bufout=gen.process(bufout)
                bufout=''
            if s == '':
                thread.interrupt_main()
                global nthreads
                if nthreads: nthreads-=1
                #log.write( ' out closed \n' )
                #return
            os.write(hi.fileno(),(s))
    tin=thread.start_new_thread(copyin,())
    tout=thread.start_new_thread(copyout,())
    terr=thread.start_new_thread(copyerr,())
    # Idle until all three relay threads have seen EOF; interrupt_main()
    # from the workers surfaces here as KeyboardInterrupt.
    while nthreads>0 :
        log.write( ' nthreads %d \n' % nthreads )
        try:
            while nthreads>0 :
                time.sleep(1)
        except KeyboardInterrupt:
            pass
    raise SystemExit(0)
debdelta/debdeltas0000777000000000000000000000000012436652141013052 2debdeltaustar debdelta/po/0000755000000000000000000000000012436652141010132 5ustar debdelta/po/messages.pot0000644000000000000000000002356212436652141012475 0ustar # SOME DESCRIPTIVE TITLE. # Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER # This file is distributed under the same license as the PACKAGE package. # FIRST AUTHOR , YEAR. # #, fuzzy msgid "" msgstr "" "Project-Id-Version: PACKAGE VERSION\n" "Report-Msgid-Bugs-To: \n" "POT-Creation-Date: 2012-08-26 22:24+0200\n" "PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" "Last-Translator: FULL NAME \n" "Language-Team: LANGUAGE \n" "Language: \n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=CHARSET\n" "Content-Transfer-Encoding: 8bit\n" #: ../debdelta:30 msgid "" "Usage: debdelta [ option... ] fromfile tofile delta\n" " Computes the difference of two deb files, from fromfile to tofile, and " "writes it to delta\n" "\n" "Options:\n" "--signing-key KEY\n" " gnupg key used to sign the delta\n" "--no-md5 do not include MD5 info in delta\n" "--needsold create a delta that can only be used if the old deb is " "available\n" " -M Mb maximum memory to use (for 'bsdiff' or 'xdelta')\n" "--delta-algo ALGO\n" " use a specific backend for computing binary diffs\n" msgstr "" #: ../debdelta:45 msgid "" "Usage: debdeltas [ option... 
] [deb files and dirs, or 'Packages' files]\n" " Computes all missing deltas for deb files.\n" " It orders by version number and produce deltas to the newest version\n" "\n" "Options:\n" "--signing-key KEY\n" " key used to sign the deltas (using GnuPG)\n" "--dir DIR force saving of deltas in this DIR\n" " (otherwise they go in the dir of the newer deb_file)\n" "--old ARGS 'Packages' files containing list of old versions of debs\n" "--alt ARGS for any cmdline argument, search for debs also in this place\n" " -n N how many deltas to produce for each deb (default unlimited)\n" "--no-md5 do not include MD5 info in delta\n" "--needsold create a delta that can only be used if the old .deb is " "available\n" "--delta-algo ALGO\n" " use a specific backend for computing binary diffs;\n" " possible values are: xdelta xdelta-bzip xdelta3 bsdiff\n" " -M Mb maximum memory to use (for 'bsdiff' or 'xdelta')\n" "--clean-deltas delete deltas if newer deb is not in archive\n" msgstr "" #: ../debdelta:69 msgid "" "Usage: debpatch [ option... 
] delta fromfile tofile \n" " Applies delta to fromfile and produces a reconstructed version of " "tofile.\n" "\n" "(When using 'debpatch' and the old .deb is not available,\n" " use '/' for the fromfile.)\n" "\n" "Usage: debpatch --info delta\n" " Write info on delta.\n" "\n" "Options:\n" "--no-md5 do not verify MD5 (if found in info in delta)\n" " -A accept unsigned deltas\n" "--format FORMAT\n" " format of created deb\n" msgstr "" #: ../debdelta:86 msgid "" "Usage: debdelta-upgrade [package names]\n" " Downloads all deltas and apply them to create the debs\n" " that are needed by 'apt-get upgrade'.\n" "\n" "Options:\n" "--dir DIR directory where to save results\n" "--deb-policy POLICY\n" " policy to decide which debs to download,\n" " -A accept unsigned deltas\n" "--format FORMAT\n" " format of created debs\n" msgstr "" #: ../debdelta:100 msgid "" "Usage: debpatch-url [package names]\n" " Show URL wherefrom to downloads all deltas that may be used to upgrade the " "given package names\n" msgstr "" #: ../debdelta:105 msgid "" " -v verbose (can be added multiple times)\n" "--no-act do not do that (whatever it is!)\n" " -d add extra debugging checks\n" " -k keep temporary files (use for debugging)\n" "--gpg-home HOME\n" " specify a different home for GPG\n" "\n" "See man page for more options and details.\n" msgstr "" #: ../debdelta:279 msgid "Error: argument of --dir is not a directory:" msgstr "" #: ../debdelta:287 msgid "Error: argument of --forensicdir is not a directory:" msgstr "" #: ../debdelta:303 msgid "Error: argument of --alt is not a directory or a regular file:" msgstr "" #: ../debdelta:308 msgid "Error: argument of --old is not a directory or a regular file:" msgstr "" #: ../debdelta:324 #, python-format msgid "Error: --gpg-home `%s' does not exist." msgstr "" #: ../debdelta:328 #, python-format msgid "Error: output format `%s' is unknown." 
msgstr "" #: ../debdelta:332 #, python-format msgid "Error: option `%s' is unknown, try --help" msgstr "" #: ../debdelta:337 #, python-format msgid "Error: feature `%s' cannot be disabled." msgstr "" #: ../debdelta:672 ../debdelta:682 #, python-format msgid "Error: the file `%s' does not exist." msgstr "" #: ../debdelta:674 ../debdelta:684 #, python-format msgid "Error: `%s' is not a regular file." msgstr "" #: ../debdelta:677 #, python-format msgid "Error: `%s' does not seem to be a Debian package." msgstr "" #: ../debdelta:687 #, python-format msgid "Error: `%s' does not seem to be a Debian delta." msgstr "" #: ../debdelta:984 #, python-format msgid "" "not enough disk space (%(free)dkB) in directory %(dir)s for applying delta " "(needs %(size)dkB)" msgstr "" #: ../debdelta:1246 ../debdelta:1303 msgid "" "(Faulty delta. Please consider retrying with the option \"--forensic=http" "\" )." msgstr "" #: ../debdelta:1259 ../debdelta:1280 msgid "There were faulty deltas." msgstr "" #: ../debdelta:1259 msgid "Now invoking the mail sender to send the logs." msgstr "" #: ../debdelta:1261 msgid "(hit any key)" msgstr "" #: ../debdelta:1280 msgid "Sending logs to server." msgstr "" #: ../debdelta:1295 msgid "Server answers:" msgstr "" #: ../debdelta:1298 #, python-format msgid "Faulty delta. 
Please send by email to %s the following files:\n" msgstr "" #: ../debdelta:1431 msgid "Delta is not signed:" msgstr "" #: ../debdelta:1433 msgid "WARNING, delta is not signed:" msgstr "" #: ../debdelta:2018 #, python-format msgid "Patching done, time %(time).2fsec, speed %(speed)dk/sec" msgstr "" #: ../debdelta:2020 #, python-format msgid "(script %(time).2fsec %(speed)dk/sec)" msgstr "" #: ../debdelta:2024 #, python-format msgid "(prelink %(time).2fsec, %(size)dk, %(speed)dk/s)" msgstr "" #: ../debdelta:2026 ../debdelta:2028 #, python-format msgid "(unaccounted %.2fsec)" msgstr "" #: ../debdelta:3290 #, python-format msgid "" "delta is %(perc)3.1f%% of deb; that is, %(save)dkB are saved, on a total of " "%(tot)dkB." msgstr "" #: ../debdelta:3292 #, python-format msgid "" "delta time %(time).2f sec, speed %(speed)dkB /sec, (%(algo)s time " "%(algotime).2fsec speed %(algospeed)dkB /sec) (corr %(corrtime).2f sec)" msgstr "" #: ../debdelta:3305 msgid "Warning, no non-option arguments, debdeltas does nothing." msgstr "" #: ../debdelta:3309 msgid "Warning, no --old arguments, debdeltas will not generate any deltas." msgstr "" #: ../debdelta:3714 #, python-format msgid "Not enough disk space for storing `%s'." msgstr "" #: ../debdelta:3771 msgid "Creating:" msgstr "" #: ../debdelta:3808 msgid "Error: testing of delta failed:" msgstr "" #: ../debdelta:3855 #, python-format msgid "Total running time: %.1f" msgstr "" #: ../debdelta:3970 #, python-format msgid "(sources.conf does not provide a server for `%s')" msgstr "" #: ../debdelta:3985 msgid "" " Proxy settings detected in the environment; using \"urllib2\" for " "downloading; but\n" " this disables some features and is in general slower and buggier. See man " "page." msgstr "" #: ../debdelta:3997 ../debdelta:3999 msgid "Initializing APT cache..." msgstr "" #: ../debdelta:4027 msgid "Upgrading APT cache..." msgstr "" #: ../debdelta:4029 msgid "upgrading..." msgstr "" #: ../debdelta:4036 msgid "Failed! 
Safe upgrading APT cache..." msgstr "" #: ../debdelta:4038 msgid "failed! trying safe-upgrade..." msgstr "" #: ../debdelta:4042 msgid "Upgraded APT cache." msgstr "" #: ../debdelta:4044 msgid "done." msgstr "" #: ../debdelta:4081 #, python-format msgid "Recreated debs are saved in the directory %s" msgstr "" #: ../debdelta:4124 #, python-format msgid "Created, time %(time)5.2fsec, speed %(speed)4s/sec, %(name)s" msgstr "" #: ../debdelta:4395 msgid "Not enough disk space to download:" msgstr "" #: ../debdelta:4420 #, python-format msgid "Downloaded, time %(time)5.2fsec, speed %(speed)4s/sec, %(name)s" msgstr "" #: ../debdelta:4556 #, python-format msgid "Downloading head of %s..." msgstr "" #: ../debdelta:4576 msgid "Delta is too big:" msgstr "" #: ../debdelta:4582 ../debdelta:4584 msgid "Delta is not present:" msgstr "" #: ../debdelta:4594 #, python-format msgid "Downloaded head of %s." msgstr "" #: ../debdelta:4632 #, python-format msgid "Deltas: %(present)d present and %(absent)d not," msgstr "" #: ../debdelta:4634 #, python-format msgid "" "downloaded so far: time %(time).2fsec, size %(size)s, speed %(speed)4s/sec." msgstr "" #: ../debdelta:4637 #, python-format msgid "Need to get %s of deltas." msgstr "" #: ../debdelta:4673 msgid "You may wish to rerun, to get also:" msgstr "" #: ../debdelta:4728 msgid "Delta-upgrade statistics:" msgstr "" #: ../debdelta:4729 #, python-format msgid "size %(size)s time %(time)dsec speed %(speed)s/sec" msgstr "" #: ../debdelta:4734 msgid "downloaded deltas, " msgstr "" #: ../debdelta:4739 msgid "patching to debs, " msgstr "" #: ../debdelta:4744 msgid "downloaded debs, " msgstr "" #: ../debdelta:4749 #, python-format msgid "" "total resulting debs, size %(size)s time %(time)dsec virtual speed %(speed)s/" "sec" msgstr "" #: ../debdelta:4767 msgid "Need a filename; try --help." msgstr "" #: ../debdelta:4788 ../debdelta:4812 msgid "Need 3 filenames; try --help." 
msgstr "" #: ../debdelta:4832 msgid "Error: argument is not a directory or a regular file:" msgstr "" #: ../debdelta:4885 #, python-format msgid "Lookup %s" msgstr "" #: ../debdelta:4891 #, python-format msgid "Sorry, cannot find an URI to download the debian package of `%s'." msgstr "" #: ../debdelta:4907 #, python-format msgid "Sorry, no source is available to upgrade `%s'." msgstr "" #: ../debdelta:4911 #, python-format msgid "Sorry, the package `%s' is already at its newest version." msgstr "" debdelta/po/it.po0000644000000000000000000004061012436652141011107 0ustar # Italian translations of 'debdelta' program messages # Copyright (C) A Mennucci # This file is distributed under the same license as the 'debdelta' package. # a m , 2012. # msgid "" msgstr "" "Project-Id-Version: debdelta\n" "Report-Msgid-Bugs-To: \n" "POT-Creation-Date: 2012-08-26 22:24+0200\n" "PO-Revision-Date: 2012-08-26 22:17+0200\n" "Last-Translator: A Mennucc \n" "Language-Team: italian \n" "Language: it\n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" "Content-Transfer-Encoding: 8bit\n" #: ../debdelta:30 msgid "" "Usage: debdelta [ option... ] fromfile tofile delta\n" " Computes the difference of two deb files, from fromfile to tofile, and " "writes it to delta\n" "\n" "Options:\n" "--signing-key KEY\n" " gnupg key used to sign the delta\n" "--no-md5 do not include MD5 info in delta\n" "--needsold create a delta that can only be used if the old deb is " "available\n" " -M Mb maximum memory to use (for 'bsdiff' or 'xdelta')\n" "--delta-algo ALGO\n" " use a specific backend for computing binary diffs\n" msgstr "" "Uso: debdelta [ opzioni... 
] DAFILE AFILE DELTA\n" " Calcola la differenza fra due deb, da DAFILE a AFILE, e la scrive in " "DELTA\n" "\n" "Opzione:\n" "--signing-key KEY\n" " chiave usata per firmare il delta (usando GnuPG)\n" "--no-md5 non includere informazione MD5 nel delta\n" "--needsold crea un delta che può essere usato solo se il\n" " vecchio deb è disponibile\n" " -M Mb memoria massima da usare (per \"bsdiff\" o \"delta\")\n" "--delta-algo ALGO\n" " usa uno specifico backend per calcolare i diff binari\n" #: ../debdelta:45 msgid "" "Usage: debdeltas [ option... ] [deb files and dirs, or 'Packages' files]\n" " Computes all missing deltas for deb files.\n" " It orders by version number and produce deltas to the newest version\n" "\n" "Options:\n" "--signing-key KEY\n" " key used to sign the deltas (using GnuPG)\n" "--dir DIR force saving of deltas in this DIR\n" " (otherwise they go in the dir of the newer deb_file)\n" "--old ARGS 'Packages' files containing list of old versions of debs\n" "--alt ARGS for any cmdline argument, search for debs also in this place\n" " -n N how many deltas to produce for each deb (default unlimited)\n" "--no-md5 do not include MD5 info in delta\n" "--needsold create a delta that can only be used if the old .deb is " "available\n" "--delta-algo ALGO\n" " use a specific backend for computing binary diffs;\n" " possible values are: xdelta xdelta-bzip xdelta3 bsdiff\n" " -M Mb maximum memory to use (for 'bsdiff' or 'xdelta')\n" "--clean-deltas delete deltas if newer deb is not in archive\n" msgstr "" "Uso: debdeltas [ opzioni... 
] [deb e directory, o file \"Packages\"]\n" " Calcola i delta per i pacchetti Debian.\n" " Li ordina per versione e produce i delta verso la versione più nuova.\n" "\n" "Opzioni:\n" "--signing-key CHIAVE\n" " chiave GnuPG usata per firmare i delta\n" "--dir DIR forza il salvataggio dei delta in DIR\n" " (altrimenti vanno nella directory del 'deb' più nuovo)\n" "--old ARG file 'Packages' che contengono le liste delle versioni\n" " vecchie dei deb\n" "--alt ARG per ogni argomento nella riga di comando,\n" " cerca anche in queste directory\n" " -n N quanti delta creare per ogni deb (valore predefinito " "illimitato)\n" "--no-md5 non includere informazioni MD5 nel delta\n" "--needsold crea un delta che può essere usato solo se il vecchio\n" " deb è disponibile\n" "--delta-algo ALGO\n" " usa il backend ALGO per calcolare le differenze binarie;\n" " valori possibili sono: xdelta xdelta-bzip xdelta3 bsdiff\n" " -M Mb massima memoria da usare (per \"bsdiff\" o \"xdelta\")\n" "--clean-deltas\n" " elimina i delta se il deb più nuovo non è presente in archivio\n" #: ../debdelta:69 msgid "" "Usage: debpatch [ option... ] delta fromfile tofile \n" " Applies delta to fromfile and produces a reconstructed version of " "tofile.\n" "\n" "(When using 'debpatch' and the old .deb is not available,\n" " use '/' for the fromfile.)\n" "\n" "Usage: debpatch --info delta\n" " Write info on delta.\n" "\n" "Options:\n" "--no-md5 do not verify MD5 (if found in info in delta)\n" " -A accept unsigned deltas\n" "--format FORMAT\n" " format of created deb\n" msgstr "" "Uso: debpatch [ opzioni... 
] DELTA DAFILE AFILE \n" " Applica DELTA a DAFILE e produce una versione ricostruita di AFILE.\n" "\n" "(Se il vecchio deb non è disponibile, usa \"/\" come fromfile)\n" "\n" "Uso: debpatch --info DELTA\n" " Scrive informazioni sul DELTA.\n" "\n" "Opzioni:\n" "--no-md5 non verifica lo MD5 (se è presente nelle informazione in delta)\n" " -A accetta delta non firmati\n" "--format FORMATO\n" " formato del deb ricostruito\n" #: ../debdelta:86 msgid "" "Usage: debdelta-upgrade [package names]\n" " Downloads all deltas and apply them to create the debs\n" " that are needed by 'apt-get upgrade'.\n" "\n" "Options:\n" "--dir DIR directory where to save results\n" "--deb-policy POLICY\n" " policy to decide which debs to download,\n" " -A accept unsigned deltas\n" "--format FORMAT\n" " format of created debs\n" msgstr "" "Uso: debdelta-upgrade [nomi pacchetti]\n" " Scarica i delta e li applica per creare i deb che servono\n" " per un 'apt-get upgrade'\n" "\n" "Opzioni:\n" "--dir DIR directory in cui salvare i pacchetti\n" "--deb-policy POLITICA\n" " politica per accettare quali pacchetti scaricare\n" " -A accetta delta non firmati\n" "--format FORMATO\n" " formato del deb ricostruito\n" #: ../debdelta:100 msgid "" "Usage: debpatch-url [package names]\n" " Show URL wherefrom to downloads all deltas that may be used to upgrade the " "given package names\n" msgstr "" "Uso: debpatch-url [nomi pacchetti]\n" " Mostra lo URL da cui scaricare i delta che possono aggiornare i\n" " pacchetti indicati.\n" #: ../debdelta:105 msgid "" " -v verbose (can be added multiple times)\n" "--no-act do not do that (whatever it is!)\n" " -d add extra debugging checks\n" " -k keep temporary files (use for debugging)\n" "--gpg-home HOME\n" " specify a different home for GPG\n" "\n" "See man page for more options and details.\n" msgstr "" " -v prolisso (si può usare più volte)\n" "--no-act non far nulla (far finta di lavorare)\n" " -d aggiungi controlli di debug extra\n" " -k tiene i file temporanei 
(per debug)\n" "--gpg-home HOME\n" " specifica una diversa directory HOME per GPG\n" "\n" "Si veda la pagina di manuale per ulteriori opzioni e dettagli.\n" #: ../debdelta:279 msgid "Error: argument of --dir is not a directory:" msgstr "Errore: l'argomento di --dir non è una directory:" #: ../debdelta:287 msgid "Error: argument of --forensicdir is not a directory:" msgstr "Errore: l'argomento di --forensicdir non è una directory:" #: ../debdelta:303 msgid "Error: argument of --alt is not a directory or a regular file:" msgstr "Errore: l'argomento di --alt non è una directory o un file regolare:" #: ../debdelta:308 msgid "Error: argument of --old is not a directory or a regular file:" msgstr "Errore: l'argomento di --old non è una directory o un file regolare:" #: ../debdelta:324 #, python-format msgid "Error: --gpg-home `%s' does not exist." msgstr "Errore: --gpg-home '%s' non esiste." #: ../debdelta:328 #, python-format msgid "Error: output format `%s' is unknown." msgstr "Errore: il formato di uscita '%s' è sconosciuto." #: ../debdelta:332 #, python-format msgid "Error: option `%s' is unknown, try --help" msgstr "Errore: l'opzione '%s' è sconosciuta, vedere --help" #: ../debdelta:337 #, python-format msgid "Error: feature `%s' cannot be disabled." msgstr "Errore: la caratteristica '%s' non può essere disabilitata." #: ../debdelta:672 ../debdelta:682 #, python-format msgid "Error: the file `%s' does not exist." msgstr "Errore: il file '%s' non esiste." #: ../debdelta:674 ../debdelta:684 #, python-format msgid "Error: `%s' is not a regular file." msgstr "Errore: '%s' non è un file regolare." #: ../debdelta:677 #, python-format msgid "Error: `%s' does not seem to be a Debian package." msgstr "Errore: '%s' non sembra essere un pacchetto Debian." #: ../debdelta:687 #, python-format msgid "Error: `%s' does not seem to be a Debian delta." msgstr "Errore: '%s' non sembra essere un delta Debian." 
#: ../debdelta:984 #, python-format msgid "" "not enough disk space (%(free)dkB) in directory %(dir)s for applying delta " "(needs %(size)dkB)" msgstr "" "non c'è abbastanza spazio su disco (%(free)dkB) nella directory\n" "%(dir)s per applicare il delta (servono %(size)dkB)" #: ../debdelta:1246 ../debdelta:1303 msgid "" "(Faulty delta. Please consider retrying with the option \"--forensic=http" "\" )." msgstr "" "(Questo delta è difettoso. Potreste riprovare con l'opzione \"--forensic=http" "\")." #: ../debdelta:1259 ../debdelta:1280 msgid "There were faulty deltas." msgstr "Alcuni delta erano difettosi." #: ../debdelta:1259 msgid "Now invoking the mail sender to send the logs." msgstr "" "Ora verrà avviato il programma di invio posta elettronica per mandare\n" "i registri." #: ../debdelta:1261 msgid "(hit any key)" msgstr "(premere un qualunque tasto)" #: ../debdelta:1280 msgid "Sending logs to server." msgstr "Sto mandando i registri al server." #: ../debdelta:1295 msgid "Server answers:" msgstr "Il server risponde:" #: ../debdelta:1298 #, python-format msgid "Faulty delta. Please send by email to %s the following files:\n" msgstr "" "Questo delta è difettoso. 
Si prega di inviare per e-mail a %s i\n" "seguenti file:\n" #: ../debdelta:1431 msgid "Delta is not signed:" msgstr "Il delta non è firmato:" #: ../debdelta:1433 msgid "WARNING, delta is not signed:" msgstr "ATTENZIONE: il delta non è firmato:" #: ../debdelta:2018 #, python-format msgid "Patching done, time %(time).2fsec, speed %(speed)dk/sec" msgstr "Creato il deb, tempo: %(time).2fsec, velocità: %(speed)dk/sec" #: ../debdelta:2020 #, python-format msgid "(script %(time).2fsec %(speed)dk/sec)" msgstr "(script %(time).2fsec %(speed)dk/sec)" #: ../debdelta:2024 #, python-format msgid "(prelink %(time).2fsec, %(size)dk, %(speed)dk/s)" msgstr "(prelink %(time).2fsec, %(size)dk, %(speed)dk/s)" #: ../debdelta:2026 ../debdelta:2028 #, python-format msgid "(unaccounted %.2fsec)" msgstr "(altro %.2fsec)" #: ../debdelta:3290 #, python-format msgid "" "delta is %(perc)3.1f%% of deb; that is, %(save)dkB are saved, on a total of " "%(tot)dkB." msgstr "" "il delta è %(perc)3.1f%% del deb; cioè, sono stati risparmiati %(save)dkB su " "un totale di %(tot)dkB." #: ../debdelta:3292 #, python-format msgid "" "delta time %(time).2f sec, speed %(speed)dkB /sec, (%(algo)s time " "%(algotime).2fsec speed %(algospeed)dkB /sec) (corr %(corrtime).2f sec)" msgstr "" "delta tempo %(time).2f sec, velocità %(speed)dkB /sec, (%(algo)s tempo " "%(algotime).2fsec velocità %(algospeed)dkB /sec) (corr %(corrtime).2f sec)" #: ../debdelta:3305 msgid "Warning, no non-option arguments, debdeltas does nothing." msgstr "Attenzione, non ci sono argomenti non-opzioni, debdeltas non fa nulla." #: ../debdelta:3309 msgid "Warning, no --old arguments, debdeltas will not generate any deltas." msgstr "Attenzione, manca l'opzione --old, debdeltas non genererà alcun delta." #: ../debdelta:3714 #, python-format msgid "Not enough disk space for storing `%s'." msgstr "Non c'è abbastanza spazio nel disco per salvare '%s'." 
#: ../debdelta:3771 msgid "Creating:" msgstr "Creazione di:" #: ../debdelta:3808 msgid "Error: testing of delta failed:" msgstr "Errore: il test del delta è fallito:" #: ../debdelta:3855 #, python-format msgid "Total running time: %.1f" msgstr "Tempo totale: %.1f" #: ../debdelta:3970 #, python-format msgid "(sources.conf does not provide a server for `%s')" msgstr "(il 'sources.conf' non specifica un server per '%s')" #: ../debdelta:3985 msgid "" " Proxy settings detected in the environment; using \"urllib2\" for " "downloading; but\n" " this disables some features and is in general slower and buggier. See man " "page." msgstr "" "Un proxy è definito nell'ambiente; verrà usato \"urllib2\" per\n" "scaricare; ma questo disabilita alcune caratteristiche e sarà\n" "più lento e malfunzionante. Vedere la pagina di manuale." #: ../debdelta:3997 ../debdelta:3999 msgid "Initializing APT cache..." msgstr "Inizializzazione della cache di APT..." #: ../debdelta:4027 msgid "Upgrading APT cache..." msgstr "Aggiornamento della cache di APT..." #: ../debdelta:4029 msgid "upgrading..." msgstr "aggiornamento..." #: ../debdelta:4036 msgid "Failed! Safe upgrading APT cache..." msgstr "Fallito! Aggiornamento sicuro della cache di APT..." #: ../debdelta:4038 msgid "failed! trying safe-upgrade..." msgstr "fallito! provo l'aggiornamento sicuro..." #: ../debdelta:4042 msgid "Upgraded APT cache." msgstr "La cache di APT è stata aggiornata." #: ../debdelta:4044 msgid "done." msgstr "fatto." 
#: ../debdelta:4081 #, python-format msgid "Recreated debs are saved in the directory %s" msgstr "I pacchetti deb ricreati saranno salvati nella directory %s" #: ../debdelta:4124 #, python-format msgid "Created, time %(time)5.2fsec, speed %(speed)4s/sec, %(name)s" msgstr "Creato, tempo %(time)5.2fsec, velocità %(speed)4s/sec, %(name)s" #: ../debdelta:4395 msgid "Not enough disk space to download:" msgstr "Non vi è abbastanza spazio disco per scaricare:" #: ../debdelta:4420 #, python-format msgid "Downloaded, time %(time)5.2fsec, speed %(speed)4s/sec, %(name)s" msgstr "Scaricato, tempo %(time)5.2fsec, velocità %(speed)4s/sec, %(name)s" #: ../debdelta:4556 #, python-format msgid "Downloading head of %s..." msgstr "Scaricamento dell'inizio di %s..." #: ../debdelta:4576 msgid "Delta is too big:" msgstr "Il delta è troppo grande:" #: ../debdelta:4582 ../debdelta:4584 msgid "Delta is not present:" msgstr "Il delta non è presente:" #: ../debdelta:4594 #, python-format msgid "Downloaded head of %s." msgstr "Scaricato l'inizio di %s." #: ../debdelta:4632 #, python-format msgid "Deltas: %(present)d present and %(absent)d not," msgstr "Delta: %(present)d presenti e %(absent)d non presenti," #: ../debdelta:4634 #, python-format msgid "" "downloaded so far: time %(time).2fsec, size %(size)s, speed %(speed)4s/sec." msgstr "" "scaricato finora: tempo %(time).2fsec, dimensione %(size)s, velocità " "%(speed)4s/sec." #: ../debdelta:4637 #, python-format msgid "Need to get %s of deltas." msgstr "Bisogna ancora scaricare %s di delta." 
#: ../debdelta:4673 msgid "You may wish to rerun, to get also:" msgstr "Si può riprovare più tardi per scaricare anche:" #: ../debdelta:4728 msgid "Delta-upgrade statistics:" msgstr "Statistiche di debdelta-upgrade:" #: ../debdelta:4729 #, python-format msgid "size %(size)s time %(time)dsec speed %(speed)s/sec" msgstr "dimensione %(size)s tempo %(time)dsec velocità %(speed)s/sec" #: ../debdelta:4734 msgid "downloaded deltas, " msgstr "delta scaricati, " #: ../debdelta:4739 msgid "patching to debs, " msgstr "deb creati, " #: ../debdelta:4744 msgid "downloaded debs, " msgstr "deb scaricati, " #: ../debdelta:4749 #, python-format msgid "" "total resulting debs, size %(size)s time %(time)dsec virtual speed %(speed)s/" "sec" msgstr "" "totale deb risultanti, dimensione %(size)s tempo %(time)dsec velocità " "virtuale %(speed)s/sec" #: ../debdelta:4767 msgid "Need a filename; try --help." msgstr "È necessario un nome di file; si veda --help." #: ../debdelta:4788 ../debdelta:4812 msgid "Need 3 filenames; try --help." msgstr "Sono necessari tre nomi di file; si veda --help." #: ../debdelta:4832 msgid "Error: argument is not a directory or a regular file:" msgstr "Errore: l'argomento non è una directory o un file regolare:" #: ../debdelta:4885 #, python-format msgid "Lookup %s" msgstr "Cerco %s" #: ../debdelta:4891 #, python-format msgid "Sorry, cannot find an URI to download the debian package of `%s'." msgstr "" "Non è possibile trovare un URI per scaricare il pacchetto\n" "Debian di '%s'." #: ../debdelta:4907 #, python-format msgid "Sorry, no source is available to upgrade `%s'." msgstr "Non è disponibile alcuna sorgente per l'aggiornamento di '%s'." #: ../debdelta:4911 #, python-format msgid "Sorry, the package `%s' is already at its newest version." msgstr "Il pacchetto '%s' è già alla versione più recente." 
debdelta/po/Makefile0000644000000000000000000000241212436652141011571 0ustar all : messages.pot it.mo fr.mo messages.pot: ../debdelta Makefile xgettext -L Python -d debdelta --from-code=UTF-8 -o messages.pot ../debdelta for L in it fr ; do \ msgmerge --previous -o $$L~new~.po $$L.po messages.pot ; \ if ! diff -q $$L~new~.po $$L.po ; then mv $$L~new~.po $$L.po ; echo Please edit $$L.po ; else rm $$L~new~.po ; echo No significant changes ; fi ; done it.mo: it.po msgfmt -c -o it.mo~~~~ it.po && mv it.mo~~~~ it.mo fr.mo: fr.po msgfmt -c -o fr.mo~~~~ fr.po && mv fr.mo~~~~ fr.mo #perche' diavolo non funziona questo? .po.mo : msgfmt -c -o ~~$@~~ $< && mv ~~$@~~ $@ #lib/po/messages.pot : Makefile lib/EElection/*py lib/EElection/plugins/*py lib/EBallotbox/*py # xgettext -o lib/po/messages.pot lib/EElection/*py lib/EElection/plugins/*py lib/EBallotbox/*py # cd lib/po && msgmerge -o itnew.po it.po messages.pot # @if ! diff -q lib/po/itnew.po lib/po/it.po ; then mv lib/po/itnew.po lib/po/it.po ; echo Please edit lib/po/it.po ; else rm lib/po/itnew.po ; echo No changes ; fi #lib/locale/it/LC_MESSAGES/e-election.mo : lib/po/it.po # msgfmt -o lib/locale/it/LC_MESSAGES/e-election.mo~~ --statistics lib/po/it.po && mv lib/locale/it/LC_MESSAGES/e-election.mo~~ lib/locale/it/LC_MESSAGES/e-election.mo debdelta/po/fr.mo0000644000000000000000000003552312436652141011106 0ustar Kte`$aL* w0%1 ?$ d n    /  ? L &f / 1 " 5 >H , 4 > '( )P %z  $ # > H b l   ' " . 7;,sA. 99s^&DD9#V>K@]_2P TaO ! 6!)>!7h!!F! ! "#"@" _"4"%"G"&##-J#9x#:#?#K-$Ty$<$D %TP%G%?%9-&,g&-&5&^&W' v'2'-'$'5 (/?(;o(G(7("+)N)Wf)H)I*Q* p*#**{*P-t.o46'C7Gk7C727*88 F9Q9f9T~9H9x::9:[:A;  & 1,>6)@?(;'7E/ "-. 2GA:8*I5%49KB+<3F=JH# !C0D$ -v verbose (can be added multiple times) --no-act do not do that (whatever it is!) -d add extra debugging checks -k keep temporary files (use for debugging) --gpg-home HOME specify a different home for GPG See man page for more options and details. 
Proxy settings detected in the environment; using "urllib2" for downloading; but this disables some features and is in general slower and buggier. See man page.(Faulty delta. Please consider retrying with the option "--forensic=http" ).(hit any key)(prelink %(time).2fsec, %(size)dk, %(speed)dk/s)(script %(time).2fsec %(speed)dk/sec)(sources.conf does not provide a server for `%s')(unaccounted %.2fsec)Created, time %(time)5.2fsec, speed %(speed)4s/sec, %(name)sCreating:Delta is not present:Delta is not signed:Delta is too big:Delta-upgrade statistics:Deltas: %(present)d present and %(absent)d not,Downloaded head of %s.Downloaded, time %(time)5.2fsec, speed %(speed)4s/sec, %(name)sDownloading head of %s...Error: --gpg-home `%s' does not exist.Error: `%s' does not seem to be a Debian delta.Error: `%s' does not seem to be a Debian package.Error: `%s' is not a regular file.Error: argument is not a directory or a regular file:Error: argument of --alt is not a directory or a regular file:Error: argument of --dir is not a directory:Error: argument of --forensicdir is not a directory:Error: argument of --old is not a directory or a regular file:Error: feature `%s' cannot be disabled.Error: option `%s' is unknown, try --helpError: output format `%s' is unknown.Error: testing of delta failed:Error: the file `%s' does not exist.Failed! Safe upgrading APT cache...Faulty delta. 
Please send by email to %s the following files: Initializing APT cache...Lookup %sNeed 3 filenames; try --help.Need a filename; try --help.Need to get %s of deltas.Not enough disk space for storing `%s'.Not enough disk space to download:Now invoking the mail sender to send the logs.Patching done, time %(time).2fsec, speed %(speed)dk/secRecreated debs are saved in the directory %sSending logs to server.Server answers:Sorry, cannot find an URI to download the debian package of `%s'.Sorry, no source is available to upgrade `%s'.Sorry, the package `%s' is already at its newest version.There were faulty deltas.Total running time: %.1fUpgraded APT cache.Upgrading APT cache...Usage: debdelta [ option... ] fromfile tofile delta Computes the difference of two deb files, from fromfile to tofile, and writes it to delta Options: --signing-key KEY gnupg key used to sign the delta --no-md5 do not include MD5 info in delta --needsold create a delta that can only be used if the old deb is available -M Mb maximum memory to use (for 'bsdiff' or 'xdelta') --delta-algo ALGO use a specific backend for computing binary diffs Usage: debdelta-upgrade [package names] Downloads all deltas and apply them to create the debs that are needed by 'apt-get upgrade'. Options: --dir DIR directory where to save results --deb-policy POLICY policy to decide which debs to download, -A accept unsigned deltas --format FORMAT format of created debs Usage: debdeltas [ option... ] [deb files and dirs, or 'Packages' files] Computes all missing deltas for deb files. 
It orders by version number and produce deltas to the newest version Options: --signing-key KEY key used to sign the deltas (using GnuPG) --dir DIR force saving of deltas in this DIR (otherwise they go in the dir of the newer deb_file) --old ARGS 'Packages' files containing list of old versions of debs --alt ARGS for any cmdline argument, search for debs also in this place -n N how many deltas to produce for each deb (default unlimited) --no-md5 do not include MD5 info in delta --needsold create a delta that can only be used if the old .deb is available --delta-algo ALGO use a specific backend for computing binary diffs; possible values are: xdelta xdelta-bzip xdelta3 bsdiff -M Mb maximum memory to use (for 'bsdiff' or 'xdelta') --clean-deltas delete deltas if newer deb is not in archive Usage: debpatch [ option... ] delta fromfile tofile Applies delta to fromfile and produces a reconstructed version of tofile. (When using 'debpatch' and the old .deb is not available, use '/' for the fromfile.) Usage: debpatch --info delta Write info on delta. Options: --no-md5 do not verify MD5 (if found in info in delta) -A accept unsigned deltas --format FORMAT format of created deb Usage: debpatch-url [package names] Show URL wherefrom to downloads all deltas that may be used to upgrade the given package names WARNING, delta is not signed:Warning, no --old arguments, debdeltas will not generate any deltas.Warning, no non-option arguments, debdeltas does nothing.You may wish to rerun, to get also:delta is %(perc)3.1f%% of deb; that is, %(save)dkB are saved, on a total of %(tot)dkB.delta time %(time).2f sec, speed %(speed)dkB /sec, (%(algo)s time %(algotime).2fsec speed %(algospeed)dkB /sec) (corr %(corrtime).2f sec)done.downloaded debs, downloaded deltas, downloaded so far: time %(time).2fsec, size %(size)s, speed %(speed)4s/sec.failed! 
trying safe-upgrade...not enough disk space (%(free)dkB) in directory %(dir)s for applying delta (needs %(size)dkB)patching to debs, size %(size)s time %(time)dsec speed %(speed)s/sectotal resulting debs, size %(size)s time %(time)dsec virtual speed %(speed)s/secupgrading...Project-Id-Version: debdelta Report-Msgid-Bugs-To: POT-Creation-Date: 2012-08-26 22:24+0200 PO-Revision-Date: 2012-08-02 23:25+0200 Last-Translator: Thomas Blein Language-Team: French Language: fr MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit X-Generator: Lokalize 1.4 Plural-Forms: nplurals=2; plural=(n > 1); -v verbosité (peut être utilisé plusieurs fois) --no-act ne pas effectuer l'action (quelle qu'elle soit !) -d ajouter des vérifications de débogage supplémentaires -k garder les fichiers temporaires (utilisés à des fins de débogage) --gpg-home HOME indiquer un répertoire personnel différent pour GPG Consultez la page de manuel pour plus d'option et de détails. Configuration de proxy détectée dans l'environnement ; utilisation de « urllib2 » pour le téléchargement ; mais cela désactive certaines caractéristiques et c'est généralement plus lent. Consultez la page de manuel.(Delta défectueux. 
Veuillez réessayer avec l'option « --forensic=http »).(Pressez n'importe quelle touche)(prelink %(time).2f sec, %(size)d k, %(speed)d k/s)(script %(time).2f sec %(speed)d k/sec)(sources.conf ne fournit aucun serveur pour « %s »)(inexpliqué %.2f sec)Créé, temps %(time)5.2f sec, vitesse %(speed)4s/sec, %(name)sCréation :Le delta est absent :Le delta n'est pas signé :Le delta est trop important :Statistiques de delta-upgrade :Deltas : %(present)d présent et %(absent)d absent,En-tête du fichier %s téléchargé.Téléchargé, temps %(time)5.2f sec, vitesse %(speed)4s/sec, %(name)sTéléchargement de l'entête de %s…Erreur : --gpg-home « %s » n'existe pas.Erreur : « %s » ne semble pas être un delta Debian.Erreur : « %s » ne semble pas être un paquet Debian.Erreur : le fichier « %s » n'est pas un fichier classique.Erreur : le paramètre n'est pas un répertoire ou un fichier classique :Erreur : le paramètre de --alt n'est pas un répertoire ou un fichier classique :Erreur : le paramètre de --dir n'est pas un répertoire :Erreur : le paramètre de --forensicdir n'est pas un répertoire :Erreur : le paramètre de --old n'est pas un répertoire ou un fichier classique :Erreur : la fonctionnalité « %s » ne peut pas être désactivée.Erreur : l'option « %s » n'est pas connue. Essayez --help.Erreur : le format de sortie « %s » n'est pas connu.Erreur : les tests de delta ont échoué :Erreur : le fichier « %s » n'existe pas.Échec ! Mise à niveau conservative du cache APT…Delta défectueux. 
Veuillez envoyer par courrier électronique à %s les fichiers suivants : Initialisation du cache APT…Recherche %sBesoin de trois noms de fichier ; essayez --help.Besoin d'un nom de fichier ; essayez --help.Besoin de récupérer %s des deltas.Pas assez d'espace disque pour enregistrer « %s »Pas assez d'espace disque pour télécharger :Invocation de l'envoyeur de mail pour envoyer les journaux.Correction effectuée, temps %(time).2f sec, vitesse %(speed)d k/sec.Les debs recréés sont sauvés dans le répertoire %s.Envoi des journaux sur le serveur.Réponses du serveur :Désolé, impossible de trouver une URI pour télécharger le paquet debian « %s ».Désolé, aucune source disponible pour la mise à niveau de « %s ».Désolé, le paquet « %s » est déjà à la version la plus récente.Il y a des deltas défectueux.Temps d'exécution total : %.1fLe cache APT a été mis à niveau.Mise à niveau du cache APT…Usage : debdelta [OPTION] FICHIER_ORIGINE FICHIER_DESTINATION DELTA Calcule la différence entre deux fichiers deb, entre FICHIER_ORIGINE et FICHIER_DESTINATION, et l'inscrit dans DELTA. Options : --signing-key CLÉ clé gnupg à utiliser pour signer la différence --no-md5 ne pas inclure les informations MD5 de la différence --needsold créer un delta qui ne peut être utilisé que si le vieux deb est disponible -M Mb mémoire maximum à utiliser (pour 'bsdiff' ou 'xdelta') --delta-algo ALGO utilise une dorsale spécifique pour calculer les différences binaires Usage : debdelta-upgrade [NOMS_DE_PAQUET] Télécharger tous les deltas et les appliquer pour créer les debs qui sont requis par « apt-get upgrade ». Options : --dir RÉP répertoire où les résultats seront enregistrés --deb-policy RÈGLE règle utilisée pour décider quels debs seront téléchargés. -A accepter les deltas non signés --format FORMAT format du deb créé Usage : debdeltas [OPTION] [fichiers et dossiers deb, ou fichiers 'Packages'] Calcule tous les deltas manquants pour les fichiers deb. 
Les fichiers deb sont classés par numéro de version et des deltas sont produits par rapport à la version la plus récente. Options : --signing-key KEY clé utilisée pour signer les deltas (utilisant GnuPG) --dir RÉP force l'enregistrement des deltas dans le répertoire RÉP (sinon ils sont enregistrés dans le dossier du fichier deb le plus récent) --old ARGS fichier 'Packages' contenant une liste des anciennes versions des fichiers deb --alt ARGS pour tout paramètre de ligne de commande, rechercher aussi pour des debs également à cet endroit. -n N nombre de deltas à produire pour chaque deb (illimité par défaut) --no-md5 ne pas inclure les informations MD5 de la différence --needsold créer un delta qui ne peut être utilisé que si le vieux .deb est disponible --delta-algo ALGO utilise une dorsale spécifique pour calculer les différences binaires les valeurs possibles sont : xdelta xdelta-bzip xdelta3 bsdiff -M Mb mémoire maximum à utiliser (pour 'bsdiff' ou 'xdelta') --clean-deltas supprimer les deltas si le deb plus récent n'est pas dans l'archive Usage : debpatch [OPTION] DELTA FICHIER_ORIGINE FICHIER_DESTINATION Applique un DELTA sur FICHIER_ORIGINE et produit une version reconstruite de FICHIER_DESTINATION. (Lors de l'utilisation de 'debpatch' et si le vieux .deb n'est pas disponible, utilisez « / » comme FICHIER_ORIGINE.) Usage : debpatch --info DELTA Écrire les informations du DELTA. Options : --no-md5 ne pas vérifier le MD5 (si trouvé dans les infos de delta). -A accepter les deltas non signés. --format FORMAT format du deb créé Usage : debpatch-url [NOMS_DE_PAQUET] Afficher l'URL utilisée pour le téléchargement de tous les deltas qui peuvent être utilisés pour mettre à jour les paquets précisés. 
ATTENTION, le delta n'est pas signé :Attention, aucun paramètre --old, debdeltas ne générera aucun delta.Attention, aucun paramètre non-option, aucune action de debdeltas.Vous pourriez relancer, pour obtenir également :Le delta a une taille de %(perc)3.1f %% du deb ; c'est-à-dire que %(save)d kB ont été économisés sur un total de %(tot)d kB.delta temps %(time).2f sec, vitesse %(speed)d kB/sec, (%(algo)s temps %(algotime).2f sec vitesse %(algospeed)d kB/sec) (corr %(corrtime).2f sec)Effectué.debs téléchargé, deltas téléchargés, déjà téléchargé : %(time).2f sec, taille %(size)s, vitesse de %(speed)4s/sec.Échec ! Essai d'une mise à niveau conservative (« safe-upgrade »)pas assez d'espace disque (%(free)d kB) dans le répertoire %(dir)s pour appliquer les deltas (besoin de %(size)d kB).correction des debs, taille %(size)s temps %(time)d sec vitesse %(speed)s/secdebs résultant totaux, taille %(size)s temps %(time)d sec vitesse virtuelle %(speed)s/secmise à niveau…debdelta/po/it.mo0000644000000000000000000003322612436652141011111 0ustar Kte`$aL* w0%1 ?$ d n    /  ? L &f / 1 " 5 >H , 4 > '( )P %z  $ # > H b l   ' " . 7;,sA. 99s^&DD9#V>K@]_2P TVaIP 0( %Y 4  C !!0!I! d!6!!C!!"#<"/`"3"%"<"E'#2m#:#E#<!$4^$1$%$ $3 %P@%&%%0%.%%!&6G&0~&R&>';A'"}''J'?'5?(u(($(#((j*^[,0{b2$2G3FK313f3+4444U4(>5tg5 5=5]'66  & 1,>6)@?(;'7E/ "-. 2GA:8*I5%49KB+<3F=JH# !C0D$ -v verbose (can be added multiple times) --no-act do not do that (whatever it is!) -d add extra debugging checks -k keep temporary files (use for debugging) --gpg-home HOME specify a different home for GPG See man page for more options and details. Proxy settings detected in the environment; using "urllib2" for downloading; but this disables some features and is in general slower and buggier. See man page.(Faulty delta. 
Please consider retrying with the option "--forensic=http" ).(hit any key)(prelink %(time).2fsec, %(size)dk, %(speed)dk/s)(script %(time).2fsec %(speed)dk/sec)(sources.conf does not provide a server for `%s')(unaccounted %.2fsec)Created, time %(time)5.2fsec, speed %(speed)4s/sec, %(name)sCreating:Delta is not present:Delta is not signed:Delta is too big:Delta-upgrade statistics:Deltas: %(present)d present and %(absent)d not,Downloaded head of %s.Downloaded, time %(time)5.2fsec, speed %(speed)4s/sec, %(name)sDownloading head of %s...Error: --gpg-home `%s' does not exist.Error: `%s' does not seem to be a Debian delta.Error: `%s' does not seem to be a Debian package.Error: `%s' is not a regular file.Error: argument is not a directory or a regular file:Error: argument of --alt is not a directory or a regular file:Error: argument of --dir is not a directory:Error: argument of --forensicdir is not a directory:Error: argument of --old is not a directory or a regular file:Error: feature `%s' cannot be disabled.Error: option `%s' is unknown, try --helpError: output format `%s' is unknown.Error: testing of delta failed:Error: the file `%s' does not exist.Failed! Safe upgrading APT cache...Faulty delta. Please send by email to %s the following files: Initializing APT cache...Lookup %sNeed 3 filenames; try --help.Need a filename; try --help.Need to get %s of deltas.Not enough disk space for storing `%s'.Not enough disk space to download:Now invoking the mail sender to send the logs.Patching done, time %(time).2fsec, speed %(speed)dk/secRecreated debs are saved in the directory %sSending logs to server.Server answers:Sorry, cannot find an URI to download the debian package of `%s'.Sorry, no source is available to upgrade `%s'.Sorry, the package `%s' is already at its newest version.There were faulty deltas.Total running time: %.1fUpgraded APT cache.Upgrading APT cache...Usage: debdelta [ option... 
] fromfile tofile delta Computes the difference of two deb files, from fromfile to tofile, and writes it to delta Options: --signing-key KEY gnupg key used to sign the delta --no-md5 do not include MD5 info in delta --needsold create a delta that can only be used if the old deb is available -M Mb maximum memory to use (for 'bsdiff' or 'xdelta') --delta-algo ALGO use a specific backend for computing binary diffs Usage: debdelta-upgrade [package names] Downloads all deltas and apply them to create the debs that are needed by 'apt-get upgrade'. Options: --dir DIR directory where to save results --deb-policy POLICY policy to decide which debs to download, -A accept unsigned deltas --format FORMAT format of created debs Usage: debdeltas [ option... ] [deb files and dirs, or 'Packages' files] Computes all missing deltas for deb files. It orders by version number and produce deltas to the newest version Options: --signing-key KEY key used to sign the deltas (using GnuPG) --dir DIR force saving of deltas in this DIR (otherwise they go in the dir of the newer deb_file) --old ARGS 'Packages' files containing list of old versions of debs --alt ARGS for any cmdline argument, search for debs also in this place -n N how many deltas to produce for each deb (default unlimited) --no-md5 do not include MD5 info in delta --needsold create a delta that can only be used if the old .deb is available --delta-algo ALGO use a specific backend for computing binary diffs; possible values are: xdelta xdelta-bzip xdelta3 bsdiff -M Mb maximum memory to use (for 'bsdiff' or 'xdelta') --clean-deltas delete deltas if newer deb is not in archive Usage: debpatch [ option... ] delta fromfile tofile Applies delta to fromfile and produces a reconstructed version of tofile. (When using 'debpatch' and the old .deb is not available, use '/' for the fromfile.) Usage: debpatch --info delta Write info on delta. 
Options: --no-md5 do not verify MD5 (if found in info in delta) -A accept unsigned deltas --format FORMAT format of created deb Usage: debpatch-url [package names] Show URL wherefrom to downloads all deltas that may be used to upgrade the given package names WARNING, delta is not signed:Warning, no --old arguments, debdeltas will not generate any deltas.Warning, no non-option arguments, debdeltas does nothing.You may wish to rerun, to get also:delta is %(perc)3.1f%% of deb; that is, %(save)dkB are saved, on a total of %(tot)dkB.delta time %(time).2f sec, speed %(speed)dkB /sec, (%(algo)s time %(algotime).2fsec speed %(algospeed)dkB /sec) (corr %(corrtime).2f sec)done.downloaded debs, downloaded deltas, downloaded so far: time %(time).2fsec, size %(size)s, speed %(speed)4s/sec.failed! trying safe-upgrade...not enough disk space (%(free)dkB) in directory %(dir)s for applying delta (needs %(size)dkB)patching to debs, size %(size)s time %(time)dsec speed %(speed)s/sectotal resulting debs, size %(size)s time %(time)dsec virtual speed %(speed)s/secupgrading...Project-Id-Version: debdelta Report-Msgid-Bugs-To: POT-Creation-Date: 2012-08-26 22:24+0200 PO-Revision-Date: 2012-08-26 22:17+0200 Last-Translator: A Mennucc Language-Team: italian Language: it MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit -v prolisso (si può usare più volte) --no-act non far nulla (far finta di lavorare) -d aggiungi controlli di debug extra -k tiene i file temporanei (per debug) --gpg-home HOME specifica una diversa directory HOME per GPG Si veda la pagina di manuale per ulteriori opzioni e dettagli. Un proxy è definito nell'ambiente; verrà usato "urllib2" per scaricare; ma questo disabilita alcune caratteristiche e sarà più lento e malfunzionante. Vedere la pagina di manuale.(Questo delta è difettoso. 
Potreste riprovare con l'opzione "--forensic=http").(premere un qualunque tasto)(prelink %(time).2fsec, %(size)dk, %(speed)dk/s)(script %(time).2fsec %(speed)dk/sec)(il 'sources.conf' non specifica un server per '%s')(altro %.2fsec)Creato, tempo %(time)5.2fsec, velocità %(speed)4s/sec, %(name)sCreazione di:Il delta non è presente:Il delta non è firmato:Il delta è troppo grande:Statistiche di debdelta-upgrade:Delta: %(present)d presenti e %(absent)d non presenti,Scaricato l'inizio di %s.Scaricato, tempo %(time)5.2fsec, velocità %(speed)4s/sec, %(name)sScaricamento dell'inizio di %s...Errore: --gpg-home '%s' non esiste.Errore: '%s' non sembra essere un delta Debian.Errore: '%s' non sembra essere un pacchetto Debian.Errore: '%s' non è un file regolare.Errore: l'argomento non è una directory o un file regolare:Errore: l'argomento di --alt non è una directory o un file regolare:Errore: l'argomento di --dir non è una directory:Errore: l'argomento di --forensicdir non è una directory:Errore: l'argomento di --old non è una directory o un file regolare:Errore: la caratteristica '%s' non può essere disabilitata.Errore: l'opzione '%s' è sconosciuta, vedere --helpErrore: il formato di uscita '%s' è sconosciuto.Errore: il test del delta è fallito:Errore: il file '%s' non esiste.Fallito! Aggiornamento sicuro della cache di APT...Questo delta è difettoso. 
Si prega di inviare per e-mail a %s i seguenti file: Inizializzazione della cache di APT...Cerco %sSono necessari tre nomi di file; si veda --help.È necessario un nome di file; si veda --help.Bisogna ancora scaricare %s di delta.Non c'è abbastanza spazio nel disco per salvare '%s'.Non vi è abbastanza spazio disco per scaricare:Ora verrà avviato il programma di invio posta elettronica per mandare i registri.Creato il deb, tempo: %(time).2fsec, velocità: %(speed)dk/secI pacchetti deb ricreati saranno salvati nella directory %sSto mandando i registri al server.Il server risponde:Non è possibile trovare un URI per scaricare il pacchetto Debian di '%s'.Non è disponibile alcuna sorgente per l'aggiornamento di '%s'.Il pacchetto '%s' è già alla versione più recente.Alcuni delta erano difettosi.Tempo totale: %.1fLa cache di APT è stata aggiornata.Aggiornamento della cache di APT...Uso: debdelta [ opzioni... ] DAFILE AFILE DELTA Calcola la differenza fra due deb, da DAFILE a AFILE, e la scrive in DELTA Opzione: --signing-key KEY chiave usata per firmare il delta (usando GnuPG) --no-md5 non includere informazione MD5 nel delta --needsold crea un delta che può essere usato solo se il vecchio deb è disponibile -M Mb memoria massima da usare (per "bsdiff" o "delta") --delta-algo ALGO usa uno specifico backend per calcolare i diff binari Uso: debdelta-upgrade [nomi pacchetti] Scarica i delta e li applica per creare i deb che servono per un 'apt-get upgrade' Opzioni: --dir DIR directory in cui salvare i pacchetti --deb-policy POLITICA politica per accettare quali pacchetti scaricare -A accetta delta non firmati --format FORMATO formato del deb ricostruito Uso: debdeltas [ opzioni... ] [deb e directory, o file "Packages"] Calcola i delta per i pacchetti Debian. Li ordina per versione e produce i delta verso la versione più nuova. 
Opzioni: --signing-key CHIAVE chiave GnuPG usata per firmare i delta --dir DIR forza il salvataggio dei delta in DIR (altrimenti vanno nella directory del 'deb' più nuovo) --old ARG file 'Packages' che contengono le liste delle versioni vecchie dei deb --alt ARG per ogni argomento nella riga di comando, cerca anche in queste directory -n N quanti delta creare per ogni deb (valore predefinito illimitato) --no-md5 non includere informazioni MD5 nel delta --needsold crea un delta che può essere usato solo se il vecchio deb è disponibile --delta-algo ALGO usa il backend ALGO per calcolare le differenze binarie; valori possibili sono: xdelta xdelta-bzip xdelta3 bsdiff -M Mb massima memoria da usare (per "bsdiff" o "xdelta") --clean-deltas elimina i delta se il deb più nuovo non è presente in archivio Uso: debpatch [ opzioni... ] DELTA DAFILE AFILE Applica DELTA a DAFILE e produce una versione ricostruita di AFILE. (Se il vecchio deb non è disponibile, usa "/" come fromfile) Uso: debpatch --info DELTA Scrive informazioni sul DELTA. Opzioni: --no-md5 non verifica lo MD5 (se è presente nelle informazione in delta) -A accetta delta non firmati --format FORMATO formato del deb ricostruito Uso: debpatch-url [nomi pacchetti] Mostra lo URL da cui scaricare i delta che possono aggiornare i pacchetti indicati. ATTENZIONE: il delta non è firmato:Attenzione, manca l'opzione --old, debdeltas non genererà alcun delta.Attenzione, non ci sono argomenti non-opzioni, debdeltas non fa nulla.Si può riprovare più tardi per scaricare anche:il delta è %(perc)3.1f%% del deb; cioè, sono stati risparmiati %(save)dkB su un totale di %(tot)dkB.delta tempo %(time).2f sec, velocità %(speed)dkB /sec, (%(algo)s tempo %(algotime).2fsec velocità %(algospeed)dkB /sec) (corr %(corrtime).2f sec)fatto.deb scaricati, delta scaricati, scaricato finora: tempo %(time).2fsec, dimensione %(size)s, velocità %(speed)4s/sec.fallito! 
provo l'aggiornamento sicuro...non c'è abbastanza spazio su disco (%(free)dkB) nella directory %(dir)s per applicare il delta (servono %(size)dkB)deb creati, dimensione %(size)s tempo %(time)dsec velocità %(speed)s/sectotale deb risultanti, dimensione %(size)s tempo %(time)dsec velocità virtuale %(speed)s/secaggiornamento...debdelta/po/fr.po0000644000000000000000000004540012436652141011104 0ustar # Translation of debdelta in French. # Copyright © 2012 Debian French l10n team # This file is distributed under the same license as the debdelta package. # # Thomas Blein , 2012. msgid "" msgstr "" "Project-Id-Version: debdelta\n" "Report-Msgid-Bugs-To: \n" "POT-Creation-Date: 2012-08-02 15:28+0200\n" "PO-Revision-Date: 2012-08-02 23:25+0200\n" "Last-Translator: Thomas Blein \n" "Language-Team: French \n" "Language: fr\n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" "Content-Transfer-Encoding: 8bit\n" "X-Generator: Lokalize 1.4\n" "Plural-Forms: nplurals=2; plural=(n > 1);\n" #: ../debdelta:30 msgid "" "Usage: debdelta [ option... 
] fromfile tofile delta\n" " Computes the difference of two deb files, from fromfile to tofile, and " "writes it to delta\n" "\n" "Options:\n" "--signing-key KEY\n" " gnupg key used to sign the delta\n" "--no-md5 do not include MD5 info in delta\n" "--needsold create a delta that can only be used if the old deb is " "available\n" " -M Mb maximum memory to use (for 'bsdiff' or 'xdelta')\n" "--delta-algo ALGO\n" " use a specific backend for computing binary diffs\n" msgstr "" "Usage : debdelta [OPTION] FICHIER_ORIGINE FICHIER_DESTINATION DELTA\n" " Calcule la différence entre deux fichiers deb, entre FICHIER_ORIGINE et " "FICHIER_DESTINATION, et l'inscrit dans DELTA.\n" "\n" "Options :\n" "--signing-key CLÉ\n" " clé gnupg à utiliser pour signer la différence\n" "--no-md5 ne pas inclure les informations MD5 de la différence\n" "--needsold créer un delta qui ne peut être utilisé que si le vieux deb\n" " est disponible\n" " -M Mb mémoire maximum à utiliser (pour 'bsdiff' ou 'xdelta')\n" "--delta-algo ALGO\n" " utilise une dorsale spécifique pour calculer les différences \n" " binaires\n" #: ../debdelta:45 msgid "" "Usage: debdeltas [ option... 
] [deb files and dirs, or 'Packages' files]\n" " Computes all missing deltas for deb files.\n" " It orders by version number and produce deltas to the newest version\n" "\n" "Options:\n" "--signing-key KEY\n" " key used to sign the deltas (using GnuPG)\n" "--dir DIR force saving of deltas in this DIR\n" " (otherwise they go in the dir of the newer deb_file)\n" "--old ARGS 'Packages' files containing list of old versions of debs\n" "--alt ARGS for any cmdline argument, search for debs also in this place\n" " -n N how many deltas to produce for each deb (default unlimited)\n" "--no-md5 do not include MD5 info in delta\n" "--needsold create a delta that can only be used if the old .deb is " "available\n" "--delta-algo ALGO\n" " use a specific backend for computing binary diffs;\n" " possible values are: xdelta xdelta-bzip xdelta3 bsdiff\n" " -M Mb maximum memory to use (for 'bsdiff' or 'xdelta')\n" "--clean-deltas delete deltas if newer deb is not in archive\n" msgstr "" "Usage : debdeltas [OPTION] [fichiers et dossiers deb, ou fichiers " "'Packages']\n" " Calcule tous les deltas manquants pour les fichiers deb.\n" " Les fichiers deb sont classés par numéro de version et des deltas sont " "produits par rapport à la version la plus récente.\n" "\n" "Options :\n" "--signing-key KEY\n" " clé utilisée pour signer les deltas (utilisant GnuPG)\n" "--dir RÉP force l'enregistrement des deltas dans le répertoire RÉP\n" " (sinon ils sont enregistrés dans le dossier du fichier deb le\n" " plus récent)\n" "--old ARGS fichier 'Packages' contenant une liste des anciennes versions\n" " des fichiers deb\n" "--alt ARGS pour tout paramètre de ligne de commande, rechercher aussi pour\n" " des debs également à cet endroit.\n" " -n N nombre de deltas à produire pour chaque deb\n" " (illimité par défaut)\n" "--no-md5 ne pas inclure les informations MD5 de la différence\n" "--needsold créer un delta qui ne peut être utilisé que si le vieux .deb\n" " est disponible\n" "--delta-algo ALGO\n" " 
utilise une dorsale spécifique pour calculer les différences\n" " binaires\n" " les valeurs possibles sont : xdelta xdelta-bzip xdelta3 bsdiff\n" " -M Mb mémoire maximum à utiliser (pour 'bsdiff' ou 'xdelta')\n" "--clean-deltas\n" " supprimer les deltas si le deb plus récent n'est pas dans\n" " l'archive\n" #: ../debdelta:69 #| msgid "" #| "Usage: debpatch [ option... ] delta fromfile tofile \n" #| " Applies delta to fromfile and produces a reconstructed version of " #| "tofile.\n" #| "\n" #| "(When using 'debpatch' and the old .deb is not available,\n" #| " use '/' for the fromfile.)\n" #| "\n" #| "Usage: debpatch --info delta\n" #| " Write info on delta.\n" #| "\n" #| "Options:\n" #| "--no-md5 do not verify MD5 (if found in info in delta)\n" #| " -A accept unsigned deltas\n" msgid "" "Usage: debpatch [ option... ] delta fromfile tofile \n" " Applies delta to fromfile and produces a reconstructed version of " "tofile.\n" "\n" "(When using 'debpatch' and the old .deb is not available,\n" " use '/' for the fromfile.)\n" "\n" "Usage: debpatch --info delta\n" " Write info on delta.\n" "\n" "Options:\n" "--no-md5 do not verify MD5 (if found in info in delta)\n" " -A accept unsigned deltas\n" "--format FORMAT\n" " format of created deb\n" msgstr "" "Usage : debpatch [OPTION] DELTA FICHIER_ORIGINE FICHIER_DESTINATION \n" " Applique un DELTA sur FICHIER_ORIGINE et produit une version reconstruite " "de FICHIER_DESTINATION.\n" "\n" "(Lors de l'utilisation de 'debpatch' et si le vieux .deb n'est pas \n" "disponible, utilisez « / » comme FICHIER_ORIGINE.)\n" "\n" "Usage : debpatch --info DELTA\n" " Écrire les informations du DELTA.\n" "\n" "Options :\n" "--no-md5 ne pas vérifier le MD5 (si trouvé dans les infos de delta).\n" " -A accepter les deltas non signés.\n" "--format FORMAT\n" " format du deb créé\n" #: ../debdelta:86 #| msgid "" #| "Usage: debdelta-upgrade [package names]\n" #| " Downloads all deltas and apply them to create the debs\n" #| " that are needed by 
'apt-get upgrade'.\n" #| "\n" #| "Options:\n" #| "--dir DIR directory where to save results\n" #| "--deb-policy POLICY\n" #| " policy to decide which debs to download,\n" #| " -A accept unsigned deltas\n" msgid "" "Usage: debdelta-upgrade [package names]\n" " Downloads all deltas and apply them to create the debs\n" " that are needed by 'apt-get upgrade'.\n" "\n" "Options:\n" "--dir DIR directory where to save results\n" "--deb-policy POLICY\n" " policy to decide which debs to download,\n" " -A accept unsigned deltas\n" "--format FORMAT\n" " format of created debs\n" msgstr "" "Usage : debdelta-upgrade [NOMS_DE_PAQUET]\n" " Télécharger tous les deltas et les appliquer pour créer les debs\n" " qui sont requis par « apt-get upgrade ».\n" "\n" "Options :\n" "--dir RÉP répertoire où les résultats seront enregistrés\n" "--deb-policy RÈGLE\n" " règle utilisée pour décider quels debs seront téléchargés.\n" " -A accepter les deltas non signés\n" "--format FORMAT\n" " format du deb créé\n" #: ../debdelta:100 msgid "" "Usage: debpatch-url [package names]\n" " Show URL wherefrom to downloads all deltas that may be used to upgrade the " "given package names\n" msgstr "" "Usage : debpatch-url [NOMS_DE_PAQUET]\n" " Afficher l'URL utilisée pour le téléchargement de tous les deltas qui " "peuvent être utilisés pour mettre à jour les paquets précisés.\n" #: ../debdelta:105 msgid "" " -v verbose (can be added multiple times)\n" "--no-act do not do that (whatever it is!)\n" " -d add extra debugging checks\n" " -k keep temporary files (use for debugging)\n" "--gpg-home HOME\n" " specify a different home for GPG\n" "\n" "See man page for more options and details.\n" msgstr "" " -v verbosité (peut être utilisé plusieurs fois)\n" "--no-act ne pas effectuer l'action (quelle qu'elle soit !)\n" " -d ajouter des vérifications de débogage supplémentaires\n" " -k garder les fichiers temporaires (utilisés à des fins de\n" " débogage)\n" "--gpg-home HOME\n" " indiquer un répertoire personnel 
différent pour GPG\n" "\n" "Consultez la page de manuel pour plus d'option et de détails.\n" #: ../debdelta:279 msgid "Error: argument of --dir is not a directory:" msgstr "Erreur : le paramètre de --dir n'est pas un répertoire :" #: ../debdelta:287 #| msgid "Error: argument of --dir is not a directory:" msgid "Error: argument of --forensicdir is not a directory:" msgstr "Erreur : le paramètre de --forensicdir n'est pas un répertoire :" #: ../debdelta:303 msgid "Error: argument of --alt is not a directory or a regular file:" msgstr "" "Erreur : le paramètre de --alt n'est pas un répertoire ou un fichier " "classique :" #: ../debdelta:308 msgid "Error: argument of --old is not a directory or a regular file:" msgstr "" "Erreur : le paramètre de --old n'est pas un répertoire ou un fichier " "classique :" #: ../debdelta:324 #, python-format msgid "Error: --gpg-home `%s' does not exist." msgstr "Erreur : --gpg-home « %s » n'existe pas." #: ../debdelta:328 #, python-format #| msgid "Error: option `%s' is unknown, try --help" msgid "Error: output format `%s' is unknown." msgstr "Erreur : le format de sortie « %s » n'est pas connu." #: ../debdelta:332 #, python-format msgid "Error: option `%s' is unknown, try --help" msgstr "Erreur : l'option « %s » n'est pas connue. Essayez --help." #: ../debdelta:337 #, python-format msgid "Error: feature `%s' cannot be disabled." msgstr "Erreur : la fonctionnalité « %s » ne peut pas être désactivée." #: ../debdelta:672 ../debdelta:682 #, python-format msgid "Error: the file `%s' does not exist." msgstr "Erreur : le fichier « %s » n'existe pas." #: ../debdelta:674 ../debdelta:684 #, python-format msgid "Error: `%s' is not a regular file." msgstr "Erreur : le fichier « %s » n'est pas un fichier classique." #: ../debdelta:677 #, python-format msgid "Error: `%s' does not seem to be a Debian package." msgstr "Erreur : « %s » ne semble pas être un paquet Debian." 
#: ../debdelta:687 #, python-format msgid "Error: `%s' does not seem to be a Debian delta." msgstr "Erreur : « %s » ne semble pas être un delta Debian." #: ../debdelta:984 #, python-format msgid "" "not enough disk space (%(free)dkB) in directory %(dir)s for applying delta " "(needs %(size)dkB)" msgstr "" "pas assez d'espace disque (%(free)d kB) dans le répertoire %(dir)s pour " "appliquer les deltas (besoin de %(size)d kB)." #: ../debdelta:1246 ../debdelta:1303 #| msgid "(Faulty delta. Please consider retrying with the option \"-d\" )." msgid "" "(Faulty delta. Please consider retrying with the option \"--forensic=http" "\" )." msgstr "" "(Delta défectueux. Veuillez réessayer avec l'option « --forensic=http »)." #: ../debdelta:1259 ../debdelta:1280 msgid "There were faulty deltas." msgstr "Il y a des deltas défectueux." #: ../debdelta:1259 msgid "Now invoking the mail sender to send the logs." msgstr "Invocation de l'envoyeur de mail pour envoyer les journaux." #: ../debdelta:1261 msgid "(hit any key)" msgstr "(Pressez n'importe quelle touche)" #: ../debdelta:1280 msgid "Sending logs to server." msgstr "Envoi des journaux sur le serveur." #: ../debdelta:1295 msgid "Server answers:" msgstr "Réponses du serveur :" #: ../debdelta:1298 #, python-format #| msgid "Faulty delta. Please send by email to %s the following files:" msgid "Faulty delta. Please send by email to %s the following files:\n" msgstr "" "Delta défectueux. Veuillez envoyer par courrier électronique à %s les " "fichiers suivants :\n" #: ../debdelta:1431 msgid "Delta is not signed:" msgstr "Le delta n'est pas signé :" #: ../debdelta:1433 msgid "WARNING, delta is not signed:" msgstr "ATTENTION, le delta n'est pas signé :" #: ../debdelta:2018 #, python-format msgid "Patching done, time %(time).2fsec, speed %(speed)dk/sec" msgstr "Correction effectuée, temps %(time).2f sec, vitesse %(speed)d k/sec." 
#: ../debdelta:2020 #, python-format msgid "(script %(time).2fsec %(speed)dk/sec)" msgstr "(script %(time).2f sec %(speed)d k/sec)" #: ../debdelta:2024 #, python-format msgid "(prelink %(time).2fsec, %(size)dk, %(speed)dk/s)" msgstr "(prelink %(time).2f sec, %(size)d k, %(speed)d k/s)" #: ../debdelta:2026 ../debdelta:2028 #, python-format msgid "(unaccounted %.2fsec)" msgstr "(inexpliqué %.2f sec)" #: ../debdelta:3290 #, python-format msgid "" "delta is %(perc)3.1f%% of deb; that is, %(save)dkB are saved, on a total of " "%(tot)dkB." msgstr "" "Le delta a une taille de %(perc)3.1f %% du deb ; c'est-à-dire que " "%(save)d kB ont été économisés sur un total de %(tot)d kB." #: ../debdelta:3292 #, python-format msgid "" "delta time %(time).2f sec, speed %(speed)dkB /sec, (%(algo)s time " "%(algotime).2fsec speed %(algospeed)dkB /sec) (corr %(corrtime).2f sec)" msgstr "" "delta temps %(time).2f sec, vitesse %(speed)d kB/sec, (%(algo)s temps " "%(algotime).2f sec vitesse %(algospeed)d kB/sec) (corr %(corrtime).2f sec)" #: ../debdelta:3305 msgid "Warning, no non-option arguments, debdeltas does nothing." msgstr "Attention, aucun paramètre non-option, aucune action de debdeltas." #: ../debdelta:3309 msgid "Warning, no --old arguments, debdeltas will not generate any deltas." msgstr "Attention, aucun paramètre --old, debdeltas ne générera aucun delta." #: ../debdelta:3714 #, python-format msgid "Not enough disk space for storing `%s'." 
msgstr "Pas assez d'espace disque pour enregistrer « %s »" #: ../debdelta:3771 msgid "Creating:" msgstr "Création :" #: ../debdelta:3808 msgid "Error: testing of delta failed:" msgstr "Erreur : les tests de delta ont échoué :" #: ../debdelta:3855 #, python-format msgid "Total running time: %.1f" msgstr "Temps d'exécution total : %.1f" #: ../debdelta:3970 #, python-format msgid "(sources.conf does not provide a server for `%s')" msgstr "(sources.conf ne fournit aucun serveur pour « %s »)" #: ../debdelta:3985 msgid "" " Proxy settings detected in the environment; using \"urllib2\" for " "downloading; but\n" " this disables some features and is in general slower and buggier. See man " "page." msgstr "" "Configuration de proxy détectée dans l'environnement ; utilisation de " "« urllib2 » pour le téléchargement ; mais cela désactive certaines " "caractéristiques et c'est généralement plus lent. Consultez la page de " "manuel." #: ../debdelta:3997 ../debdelta:3999 msgid "Initializing APT cache..." msgstr "Initialisation du cache APT…" #: ../debdelta:4027 msgid "Upgrading APT cache..." msgstr "Mise à niveau du cache APT…" #: ../debdelta:4029 msgid "upgrading..." msgstr "mise à niveau…" #: ../debdelta:4036 #| msgid "Upgrading APT cache..." msgid "Failed! Safe upgrading APT cache..." msgstr "Échec ! Mise à niveau conservative du cache APT…" #: ../debdelta:4038 msgid "failed! trying safe-upgrade..." msgstr "Échec ! Essai d'une mise à niveau conservative (« safe-upgrade »)" #: ../debdelta:4042 msgid "Upgraded APT cache." msgstr "Le cache APT a été mis à niveau." #: ../debdelta:4044 msgid "done." msgstr "Effectué." #: ../debdelta:4081 #, python-format msgid "Recreated debs are saved in the directory %s" msgstr "Les debs recréés sont sauvés dans le répertoire %s." 
#: ../debdelta:4124 #, python-format msgid "Created, time %(time)5.2fsec, speed %(speed)4s/sec, %(name)s" msgstr "Créé, temps %(time)5.2f sec, vitesse %(speed)4s/sec, %(name)s" #: ../debdelta:4395 msgid "Not enough disk space to download:" msgstr "Pas assez d'espace disque pour télécharger :" #: ../debdelta:4420 #, python-format msgid "Downloaded, time %(time)5.2fsec, speed %(speed)4s/sec, %(name)s" msgstr "Téléchargé, temps %(time)5.2f sec, vitesse %(speed)4s/sec, %(name)s" #: ../debdelta:4556 #, python-format msgid "Downloading head of %s..." msgstr "Téléchargement de l'entête de %s…" #: ../debdelta:4576 msgid "Delta is too big:" msgstr "Le delta est trop important :" #: ../debdelta:4582 ../debdelta:4584 msgid "Delta is not present:" msgstr "Le delta est absent :" #: ../debdelta:4594 #, python-format msgid "Downloaded head of %s." msgstr "En-tête du fichier %s téléchargé." #: ../debdelta:4632 #, python-format msgid "Deltas: %(present)d present and %(absent)d not," msgstr "Deltas : %(present)d présent et %(absent)d absent," #: ../debdelta:4634 #, python-format msgid "" "downloaded so far: time %(time).2fsec, size %(size)s, speed %(speed)4s/sec." msgstr "" "déjà téléchargé : %(time).2f sec, taille %(size)s, vitesse de %(speed)4s/sec." #: ../debdelta:4637 #, python-format msgid "Need to get %s of deltas." msgstr "Besoin de récupérer %s des deltas." 
#: ../debdelta:4673 msgid "You may wish to rerun, to get also:" msgstr "Vous pourriez relancer, pour obtenir également :" #: ../debdelta:4728 msgid "Delta-upgrade statistics:" msgstr "Statistiques de delta-upgrade :" #: ../debdelta:4729 #, python-format msgid "size %(size)s time %(time)dsec speed %(speed)s/sec" msgstr "taille %(size)s temps %(time)d sec vitesse %(speed)s/sec" #: ../debdelta:4734 msgid "downloaded deltas, " msgstr "deltas téléchargés, " #: ../debdelta:4739 msgid "patching to debs, " msgstr "correction des debs, " #: ../debdelta:4744 msgid "downloaded debs, " msgstr "debs téléchargé, " #: ../debdelta:4749 #, python-format msgid "" "total resulting debs, size %(size)s time %(time)dsec virtual speed %(speed)s/" "sec" msgstr "" "debs résultant totaux, taille %(size)s temps %(time)d sec vitesse virtuelle " "%(speed)s/sec" #: ../debdelta:4767 msgid "Need a filename; try --help." msgstr "Besoin d'un nom de fichier ; essayez --help." #: ../debdelta:4788 ../debdelta:4812 msgid "Need 3 filenames; try --help." msgstr "Besoin de trois noms de fichier ; essayez --help." #: ../debdelta:4832 msgid "Error: argument is not a directory or a regular file:" msgstr "" "Erreur : le paramètre n'est pas un répertoire ou un fichier classique :" #: ../debdelta:4885 #, python-format msgid "Lookup %s" msgstr "Recherche %s" #: ../debdelta:4891 #, python-format msgid "Sorry, cannot find an URI to download the debian package of `%s'." msgstr "" "Désolé, impossible de trouver une URI pour télécharger le paquet debian " "« %s »." #: ../debdelta:4907 #, python-format msgid "Sorry, no source is available to upgrade `%s'." msgstr "Désolé, aucune source disponible pour la mise à niveau de « %s »." #: ../debdelta:4911 #, python-format msgid "Sorry, the package `%s' is already at its newest version." msgstr "Désolé, le paquet « %s » est déjà à la version la plus récente." 
debdelta/po/pt.po0000644000000000000000000004146412436652141011126 0ustar # Translation of debdelta's messages to European Portuguese # Copyright (C) 2014 THE debdelta'S COPYRIGHT HOLDER # This file is distributed under the same license as the debdelta package. # # Américo Monteiro , 2014. msgid "" msgstr "" "Project-Id-Version: debdelta 0.50+2\n" "Report-Msgid-Bugs-To: \n" "POT-Creation-Date: 2012-08-26 22:24+0200\n" "PO-Revision-Date: 2014-09-07 12:04+0100\n" "Last-Translator: Américo Monteiro \n" "Language-Team: Portuguese \n" "Language: pt\n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" "Content-Transfer-Encoding: 8bit\n" "Plural-Forms: nplurals=2; plural=(n != 1);\n" "X-Generator: Lokalize 1.4\n" #: ../debdelta:30 msgid "" "Usage: debdelta [ option... ] fromfile tofile delta\n" " Computes the difference of two deb files, from fromfile to tofile, and " "writes it to delta\n" "\n" "Options:\n" "--signing-key KEY\n" " gnupg key used to sign the delta\n" "--no-md5 do not include MD5 info in delta\n" "--needsold create a delta that can only be used if the old deb is " "available\n" " -M Mb maximum memory to use (for 'bsdiff' or 'xdelta')\n" "--delta-algo ALGO\n" " use a specific backend for computing binary diffs\n" msgstr "" "Utilização: debdelta [ opção... ] fromfile tofile delta\n" " Computa a diferença de dois ficheiros deb, de fromfile para tofile, e " "escreve-a no delta\n" "\n" "Opções:\n" "--signing-key CHAVE\n" " chave gnupg usada para assinar o delta\n" "--no-md5 não inclui informação MD5 no delta\n" "--needsold cria um delta que só pode ser usado se o deb antigo estiver " "disponível" " -M Mb máximo de memória a usar (para 'bsdiff' ou 'xdelta')\n" "--delta-algo ALGO\n" " usa um backend específico para computar diferenças de binários\n" #: ../debdelta:45 msgid "" "Usage: debdeltas [ option... 
] [deb files and dirs, or 'Packages' files]\n" " Computes all missing deltas for deb files.\n" " It orders by version number and produce deltas to the newest version\n" "\n" "Options:\n" "--signing-key KEY\n" " key used to sign the deltas (using GnuPG)\n" "--dir DIR force saving of deltas in this DIR\n" " (otherwise they go in the dir of the newer deb_file)\n" "--old ARGS 'Packages' files containing list of old versions of debs\n" "--alt ARGS for any cmdline argument, search for debs also in this place\n" " -n N how many deltas to produce for each deb (default unlimited)\n" "--no-md5 do not include MD5 info in delta\n" "--needsold create a delta that can only be used if the old .deb is " "available\n" "--delta-algo ALGO\n" " use a specific backend for computing binary diffs;\n" " possible values are: xdelta xdelta-bzip xdelta3 bsdiff\n" " -M Mb maximum memory to use (for 'bsdiff' or 'xdelta')\n" "--clean-deltas delete deltas if newer deb is not in archive\n" msgstr "" "Utilização: debdeltas [ opção... 
] [ficheiros deb e directórios, ou " "ficheiros 'Packages']\n" " Computa todos os deltas em falta para ficheiros deb.\n" " Organiza por número de versão e produz deltas para a versão mais recente\n" "\n" "Opções:\n" "--signing-key CHAVE\n" " chave usada para assinar os deltas (usando GnuPG)\n" "--dir DIRECTÓRIO força a gravação dos deltas neste DIRECTÓRIO\n" " (de outro modo eles irão para o directório do ficheiro deb " "mais recente)\n" "--old ARGUMENTOS ficheiros 'Packages' que contém uma lista de versões " "antigas de debs\n" "--alt ARGUMENTOS para qualquer argumento de linha de comandos, procure " "também por debs aqui\n" " -n N quantos deltas produzir para cada deb (predefinição ilimitado)\n" "--no-md5 não inclui informação MD5 no delta\n" "--needsold cria um delta que apenas pode ser usado se o antigo .deb " "estiver disponível\n" "--delta-algo ALGO\n" " usa um backend específico para computar diferenças de binários;\n" " os valores possíveis são: xdelta xdelta-bzip xdelta3 bsdiff\n" " -M Mb máximo de memória a usar (para 'bsdiff' ou 'xdelta')\n" "--clean-deltas apaga os deltas se o deb mais recente não estiver no " "arquivo\n" #: ../debdelta:69 msgid "" "Usage: debpatch [ option... ] delta fromfile tofile \n" " Applies delta to fromfile and produces a reconstructed version of " "tofile.\n" "\n" "(When using 'debpatch' and the old .deb is not available,\n" " use '/' for the fromfile.)\n" "\n" "Usage: debpatch --info delta\n" " Write info on delta.\n" "\n" "Options:\n" "--no-md5 do not verify MD5 (if found in info in delta)\n" " -A accept unsigned deltas\n" "--format FORMAT\n" " format of created deb\n" msgstr "" "Utilização: debpatch [ opção... 
] delta de-ficheiro para-ficheiro \n" " Aplica o delta ao de-ficheiro e produz uma versão reconstruída de " "para-ficheiro.\n" "\n" "(Quando se usa 'debpatch' e o .deb antigo não está disponível,\n" " use '/' para o de-ficheiro.)\n" "\n" "Utilização: debpatch --info delta\n" " Escreve informação no delta.\n" "\n" "Opções:\n" "--no-md5 não verifica o MD5 (se encontrado em informações no delta)\n" " -A aceita deltas não assinados\n" "--format FORMATO\n" " formato do deb criado\n" #: ../debdelta:86 msgid "" "Usage: debdelta-upgrade [package names]\n" " Downloads all deltas and apply them to create the debs\n" " that are needed by 'apt-get upgrade'.\n" "\n" "Options:\n" "--dir DIR directory where to save results\n" "--deb-policy POLICY\n" " policy to decide which debs to download,\n" " -A accept unsigned deltas\n" "--format FORMAT\n" " format of created debs\n" msgstr "" "Utilização: debdelta-upgrade [nomes de pacotes]\n" " Descarrega todos os deltas e aplica-os para criar os debs que são\n" " necessários pelo 'apt-get upgrade'.\n" "\n" "Opções:\n" "--dir DIRECTÓRIO directório onde guardar os resultados\n" "--deb-policy POLITICA\n" " politica para decidir quais debs descarregar,\n" " -A aceita deltas não assinados\n" "--format FORMATO\n" " formato dos debs criados\n" #: ../debdelta:100 msgid "" "Usage: debpatch-url [package names]\n" " Show URL wherefrom to downloads all deltas that may be used to upgrade the " "given package names\n" msgstr "" "Utilização: debpatch-url [nomes de pacotes]\n" " Mostra o URL de onde descarregar todos os deltas que possam ser usados " "para actualizar os nomes de pacotes fornecidos\n" #: ../debdelta:105 msgid "" " -v verbose (can be added multiple times)\n" "--no-act do not do that (whatever it is!)\n" " -d add extra debugging checks\n" " -k keep temporary files (use for debugging)\n" "--gpg-home HOME\n" " specify a different home for GPG\n" "\n" "See man page for more options and details.\n" msgstr "" " -v detalhado (pode ser adiciona 
várias vezes)\n" "--no-act não faz isso (seja o que for!)\n" " -d adiciona verificações extra de depuração\n" " -k mantêm os ficheiros temporários (usado para depuração)\n" "--gpg-home HOME\n" " especifica uma home diferente para GPG\n" "\n" "Veja o manual para mais opções e detalhes.\n" #: ../debdelta:279 msgid "Error: argument of --dir is not a directory:" msgstr "Erro: argumento de --dir não é um directório:" #: ../debdelta:287 msgid "Error: argument of --forensicdir is not a directory:" msgstr "Erro: argumento de -forensicdir não é um directório:" #: ../debdelta:303 msgid "Error: argument of --alt is not a directory or a regular file:" msgstr "Erro: argumento de --alt não é um directório nem um ficheiro regular:" #: ../debdelta:308 msgid "Error: argument of --old is not a directory or a regular file:" msgstr "Erro: argumento de --old não é um directório nem um ficheiro regular:" #: ../debdelta:324 #, python-format msgid "Error: --gpg-home `%s' does not exist." msgstr "Erro: --gpg-home `%s' não existe." #: ../debdelta:328 #, python-format msgid "Error: output format `%s' is unknown." msgstr "Erro: formato de saída `%s' é desconhecido." #: ../debdelta:332 #, python-format msgid "Error: option `%s' is unknown, try --help" msgstr "Erro: a opção `%s' é desconhecida, tente --help" #: ../debdelta:337 #, python-format msgid "Error: feature `%s' cannot be disabled." msgstr "Erro: a funcionalidade `%s' não pode ser desactivada." #: ../debdelta:672 ../debdelta:682 #, python-format msgid "Error: the file `%s' does not exist." msgstr "Erro: o ficheiro `%s' não existe." #: ../debdelta:674 ../debdelta:684 #, python-format msgid "Error: `%s' is not a regular file." msgstr "Erro: `%s' não é um ficheiro regular." #: ../debdelta:677 #, python-format msgid "Error: `%s' does not seem to be a Debian package." msgstr "Erro: `%s' não parece ser um pacote Debian." #: ../debdelta:687 #, python-format msgid "Error: `%s' does not seem to be a Debian delta." 
msgstr "Erro: `%s' não parece ser um delta Debian." #: ../debdelta:984 #, python-format msgid "" "not enough disk space (%(free)dkB) in directory %(dir)s for applying delta " "(needs %(size)dkB)" msgstr "" "não há espaço de disco suficiente (%(free)dkB) no directório %(dir)s para " "aplicar o delta (precisa de %(size)dkB)" #: ../debdelta:1246 ../debdelta:1303 msgid "" "(Faulty delta. Please consider retrying with the option \"--forensic=http" "\" )." msgstr "" "(Delta com defeito. Por favor considere tentar de novo com o opção " "\"--forensic=http\" )." #: ../debdelta:1259 ../debdelta:1280 msgid "There were faulty deltas." msgstr "Existiram deltas com defeito." #: ../debdelta:1259 msgid "Now invoking the mail sender to send the logs." msgstr "Agora a invocar o transporte de mail para enviar os relatórios." #: ../debdelta:1261 msgid "(hit any key)" msgstr "(carregue em qualquer tecla)" #: ../debdelta:1280 msgid "Sending logs to server." msgstr "A enviar relatórios para o servidor." #: ../debdelta:1295 msgid "Server answers:" msgstr "Resposta do servidor:" #: ../debdelta:1298 #, python-format msgid "Faulty delta. Please send by email to %s the following files:\n" msgstr "" "Delta com defeito. 
Por favor envie os seguintes ficheiros por email para " "%s:\n" #: ../debdelta:1431 msgid "Delta is not signed:" msgstr "Delta não está assinado:" #: ../debdelta:1433 msgid "WARNING, delta is not signed:" msgstr "AVISO, delta não está assinado:" #: ../debdelta:2018 #, python-format msgid "Patching done, time %(time).2fsec, speed %(speed)dk/sec" msgstr "" "Aplicação de patch pronta, tempo %(time).2fsec, velocidade %(speed)dk/sec" #: ../debdelta:2020 #, python-format msgid "(script %(time).2fsec %(speed)dk/sec)" msgstr "(script %(time).2fsec %(speed)dk/sec)" #: ../debdelta:2024 #, python-format msgid "(prelink %(time).2fsec, %(size)dk, %(speed)dk/s)" msgstr "(pré-link %(time).2fsec, %(size)dk, %(speed)dk/s)" #: ../debdelta:2026 ../debdelta:2028 #, python-format msgid "(unaccounted %.2fsec)" msgstr "(não contado %.2fsec)" #: ../debdelta:3290 #, python-format msgid "" "delta is %(perc)3.1f%% of deb; that is, %(save)dkB are saved, on a total of " "%(tot)dkB." msgstr "" "delta é %(perc)3.1f%% de deb; isto é, %(save)dkB são salvados, num total " "de %(tot)dkB." #: ../debdelta:3292 #, python-format msgid "" "delta time %(time).2f sec, speed %(speed)dkB /sec, (%(algo)s time " "%(algotime).2fsec speed %(algospeed)dkB /sec) (corr %(corrtime).2f sec)" msgstr "" "tempo de delta %(time).2f sec, velocidade %(speed)dkB /sec, (%(algo)s " "tempo %(algotime).2fsec velocidade %(algospeed)dkB /sec) " "(corr %(corrtime).2f sec)" #: ../debdelta:3305 msgid "Warning, no non-option arguments, debdeltas does nothing." msgstr "Aviso, nenhum argumento não-opção, o debdeltas não faz nada." #: ../debdelta:3309 msgid "Warning, no --old arguments, debdeltas will not generate any deltas." msgstr "" "Aviso, nenhum argumento --old, o debdeltas não irá gerar nenhuns deltas." #: ../debdelta:3714 #, python-format msgid "Not enough disk space for storing `%s'." msgstr "Não há espaço de disco suficiente para armazenar `%s'." 
#: ../debdelta:3771 msgid "Creating:" msgstr "A criar:" #: ../debdelta:3808 msgid "Error: testing of delta failed:" msgstr "Erro: testes do delta falhados:" #: ../debdelta:3855 #, python-format msgid "Total running time: %.1f" msgstr "Tempo total de execução: %.1f" #: ../debdelta:3970 #, python-format msgid "(sources.conf does not provide a server for `%s')" msgstr "(sources.conf não fornece um servidor para `%s')" #: ../debdelta:3985 msgid "" " Proxy settings detected in the environment; using \"urllib2\" for " "downloading; but\n" " this disables some features and is in general slower and buggier. See man " "page." msgstr "" " Detectadas configurações de Proxy no ambiente; a usar \"urllib2\" para " "descarga, mas\n" " isto desactiva algumas funcionalidades e é em geral mais lento e " "problemático. Veja o manual." #: ../debdelta:3997 ../debdelta:3999 msgid "Initializing APT cache..." msgstr "a inicializar a cache do APT..." #: ../debdelta:4027 msgid "Upgrading APT cache..." msgstr "a actualizar a cache do APT..." #: ../debdelta:4029 msgid "upgrading..." msgstr "a actualizar..." #: ../debdelta:4036 msgid "Failed! Safe upgrading APT cache..." msgstr "Falhado! A fazer actualização segura da cache de APT..." #: ../debdelta:4038 msgid "failed! trying safe-upgrade..." msgstr "falhado! a tentar actualização segura..." #: ../debdelta:4042 msgid "Upgraded APT cache." msgstr "Cache de APT actualizada." #: ../debdelta:4044 msgid "done." msgstr "feito." #: ../debdelta:4081 #, python-format msgid "Recreated debs are saved in the directory %s" msgstr "Os debs recriados são guardados no directório %s" #: ../debdelta:4124 #, python-format msgid "Created, time %(time)5.2fsec, speed %(speed)4s/sec, %(name)s" msgstr "Criado, tempo %(time)5.2fsec, velocidade %(speed)4s/sec, %(name)s" #: ../debdelta:4395 msgid "Not enough disk space to download:" msgstr "Não há espaço de disco suficiente para descarregar." 
#: ../debdelta:4420 #, python-format msgid "Downloaded, time %(time)5.2fsec, speed %(speed)4s/sec, %(name)s" msgstr "" "Descarregado, tempo %(time)5.2fsec, velocidade %(speed)4s/sec, %(name)s" #: ../debdelta:4556 #, python-format msgid "Downloading head of %s..." msgstr "A descarregar cabeça de %s..." #: ../debdelta:4576 msgid "Delta is too big:" msgstr "Delta é demasiado grande:" #: ../debdelta:4582 ../debdelta:4584 msgid "Delta is not present:" msgstr "Delta não está presente:" #: ../debdelta:4594 #, python-format msgid "Downloaded head of %s." msgstr "Descarregada cabeça de %s." #: ../debdelta:4632 #, python-format msgid "Deltas: %(present)d present and %(absent)d not," msgstr "Deltas: %(present)d presentes e %(absent)d não presentes," #: ../debdelta:4634 #, python-format msgid "" "downloaded so far: time %(time).2fsec, size %(size)s, speed %(speed)4s/sec." msgstr "" "descarregado até agora: tempo %(time).2fsec, tamanho %(size)s, velocidade " "%(speed)4s/sec." #: ../debdelta:4637 #, python-format msgid "Need to get %s of deltas." msgstr "Preciso de obter %s de deltas." 
#: ../debdelta:4673 msgid "You may wish to rerun, to get also:" msgstr "Você pode desejar voltar a executar, para também obter:" #: ../debdelta:4728 msgid "Delta-upgrade statistics:" msgstr "Estatísticas de actualização de delta:" #: ../debdelta:4729 #, python-format msgid "size %(size)s time %(time)dsec speed %(speed)s/sec" msgstr "tamanho %(size)s tempo %(time)dsec velocidade %(speed)s/sec" #: ../debdelta:4734 msgid "downloaded deltas, " msgstr "deltas descarregados, " #: ../debdelta:4739 msgid "patching to debs, " msgstr "a aplicar patches aos debs, " #: ../debdelta:4744 msgid "downloaded debs, " msgstr "debs descarregados, " #: ../debdelta:4749 #, python-format msgid "" "total resulting debs, size %(size)s time %(time)dsec virtual speed %(speed)s/" "sec" msgstr "" "total de debs resultantes, tamanho %(size)s tempo %(time)dsec velocidade " "virtual %(speed)s/sec" #: ../debdelta:4767 msgid "Need a filename; try --help." msgstr "Preciso de um nome de ficheiro; tente --help." #: ../debdelta:4788 ../debdelta:4812 msgid "Need 3 filenames; try --help." msgstr "Preciso de 3 nomes de ficheiros; tente --help." #: ../debdelta:4832 msgid "Error: argument is not a directory or a regular file:" msgstr "Erro: argumento não é um directório nem um ficheiro regular:" #: ../debdelta:4885 #, python-format msgid "Lookup %s" msgstr "Procurar %s" #: ../debdelta:4891 #, python-format msgid "Sorry, cannot find an URI to download the debian package of `%s'." msgstr "" "Desculpe, não consigo encontrar um URL para descarregar o pacote debian " "de `%s'." #: ../debdelta:4907 #, python-format msgid "Sorry, no source is available to upgrade `%s'." msgstr "Desculpe, não há fontes disponíveis para actualizar `%s'." #: ../debdelta:4911 #, python-format msgid "Sorry, the package `%s' is already at its newest version." msgstr "Desculpe, o pacote `%s' já está na sua versão mais recente." 
debdelta/THANKS0000644000000000000000000000050312436652141010425 0ustar Thanks to: Otavio Salvador, for some help with python-apt Michael Vogt, for a lot of help with python-apt Paul Wise, for a LOT of testing Lars Wirzenius, for some patches Pär Andersson, for lzma support roman@khimov.ru for prelink support http://www.wingware.com , for WingIDE, free license, for this project debdelta/contrib/0000755000000000000000000000000012436652141011154 5ustar debdelta/contrib/empty_0.0_all.deb0000644000000000000000000000113212436652141014170 0ustar ! debian-binary/ 1266063208 1013 1013 100644 4 ` 2.0 control.tar.gz/ 1266063331 1013 1013 100644 299 ` vKAk B$MavPֱVf4t~6a;FEѧ>ڻ-ijsG(9D]GUYeX1eX+R:ZҚt_fJF҇X\Zkyw5)x`xd<'syيM0>tue󱨃QS4t0rSNIi#1lHԬ]|_g( data.tar.gz/ 1266063366 1013 1013 100644 109 ` vKα 1+-z@ @@@@h&`71vWf+3jYGZm.IJ}?zOCX( debdelta/contrib/debmirror-delta-security_ubuntu.conf0000644000000000000000000000366212436652141020355 0ustar # basedir BASEDIR=/mirror # tmpdir to use TMPDIR=$BASEDIR/tmp # patched debmirror script so that --trash is supported DEBMIRROR=$BASEDIR/bin/debmirror.mine # the --tash commandline, this can be unset here if you don't have # a patched debmirror (also in this case no deltas from security1 # to security2 will be generated DEBMIRROR_TRASH="--trash $deltamir/old_debs" DEBMIRROR_METHOD="http" DEBMIRROR_OPTIONS="--passive" #where the security archive is sechost=security.ubuntu.com release=ubuntu #list of architecture (space separated, comma separated) ARCHs="i386 amd64" ARCHc="i386,amd64" # the archive sections to use SECTIONS="main restricted universe multiverse" #where to download the full mirror of ubuntu stable security secdebmir=$BASEDIR/ubuntu-security #the lock used by debmirror secdebmirlock=$secdebmir/Archive-Update-in-Progress-`hostname -f` #where is the full mirror of ubuntu stable fulldebmir=$BASEDIR/ubuntu #the name by which "stable" is known in that mirror origstable="lucid" #the corresponding path snippet for the security archive 
secstable="lucid-security" #where the gnupg stuff specific to debdelta resides GNUPGHOME=$BASEDIR/gnupg #where the gnupg agent variable is stored #(unset this if you do not want to use the agent) # note that this needs gnupg2 >= 2.0.11 (that is not lenny) GNUPGAGENTINFO="${GNUPGHOME}/debdeltas-gpg-agent-info" #the secret key to sign the deltas GNUPGSEC=THESECRETKEY #note: if you export this , then it will affect also debmirror # make sure the deb repository key is copied there #export GNUPGHOME #where to create the repository of deltas deltamir=$BASEDIR/ubuntu-security-deltas #where is the debdeltas program debdeltas=/usr/bin/debdeltas #options to your taste # for lenny #debdelta_opt=" -n 3 --disable-feature lzma --disable-feature xdelta3-fifo --gpg-home ~/debdelta/gnupg --sign $GNUPGSEC" # for squeeze debdelta_opt=" -n 3 --delta-algo xdelta3 --gpg-home ${GNUPGHOME} --sign $GNUPGSEC" debdelta/contrib/debmirror-marshal-delta-security0000755000000000000000000000656012436652141017457 0ustar #!/bin/bash set -e # A better simple script that will create a repository of deltas, that may be # used by debdelta-upgrade for upgrading packages in stable-security # This works with mirrors created by 'debmirror --debmarshal', that keep older versions # of the repositories # Copyright (C) 2011 Andrea Mennucci. # With a contribution by Torsten Spindler. # License: GNU Library General Public License, version 2 or later #who I am b=`basename $0` TMPDIR=/mirror/tmp export TMPDIR DEBUG='' VERBOSE='' [ "$1" = '-v' ] && { VERBOSE='-v' ; shift ; } [ "$1" = '-d' ] && { DEBUG='--debug' ; } [ "$1" = '-v' ] && { VERBOSE='-v' ; shift ; } if [ "$1" = '' ] || test ! -r "$1" ; then echo please provide the configuration file as argument exit 1 fi . "$1" # set gpg-agent variables, test it gpgagentcmd="gpg-agent --homedir \"${GNUPGHOME}\" --daemon --write-env-file \"$GNUPGAGENTINFO\" " if test "$GNUPGAGENTINFO" ; then if test ! 
-r "$GNUPGAGENTINFO" ; then echo ERROR no agent info, please start the agent with echo $gpgagentcmd exit 1 else . "$GNUPGAGENTINFO" export GPG_AGENT_INFO if test ! "${GPG_AGENT_INFO}" -o ! -e "${GPG_AGENT_INFO/:*/}" -o ! -O "${GPG_AGENT_INFO/:*/}" ; then echo ERROR agent info is not OK, please run the command echo $gpgagentcmd exit 1 elif ! echo | gpg-connect-agent --homedir ${GNUPGHOME} ; then echo ERROR agent is not responding, please run the command echo $gpgagentcmd exit 1 fi fi fi #test that we can sign, possibly loading the password in the agent if test "$GNUPGSEC" ; then t=`tempfile` echo pippo > $t if ! gpg2 --quiet --batch --homedir "${GNUPGHOME}" -o /dev/null --default-key $GNUPGSEC --sign $t ; then echo signature test FAILED rm $t exit 1 fi rm $t fi #do mirror security trap "rm $VERBOSE -f $secdebmirlock ; echo MIRRORING FAILED " 0 #this needs a version of 'debmirror' >= 2.5 debmirror $secdebmir --debmarshal $DEBUG $VERBOSE \ --nosource -h $sechost --method=http --diff=none \ -r $release -d ${secstable} -s main,contrib,non-free --arch=$ARCHc #do create deltas lockfile -r 1 /tmp/$b.lock || exit 1 trap "rm $VERBOSE -f /tmp/$b.lock; echo DELTA CREATION FAILED " 0 cd $secdebmir for arch in $ARCHs ; do for sec in main contrib non-free ; do wfo="--old $fulldebmir/dists/${origstable}/$sec/binary-$arch/Packages.gz" #add all older versions of "origstable" (they are not a lot) c=0 while test -r "$fulldebmir/dists/${origstable}/$c/$sec/binary-$arch/Packages.gz" ; do wfo="$wfo --old $fulldebmir/dists/${origstable}/$c/$sec/binary-$arch/Packages.gz" let c=c+1 done #add all older versions of "secstable", different from the actual one (they are not a lot) c=0 while test -r "dists/${secstable}/$c/$sec/binary-$arch/Packages.gz" && \ ! 
cmp "dists/${secstable}/$c/$sec/binary-$arch/Packages.gz" \ "dists/${secstable}/$sec/binary-$arch/Packages.gz" ; do wfo="$wfo --old dists/${secstable}/$c/$sec/binary-$arch/Packages.gz" let c=c+1 done ## echo $debdeltas $VERBOSE -v --test $debdelta_opt $wfo \ --dir $deltamir// dists/${secstable}/$sec/binary-$arch/Packages.gz done done #do clean up a bit trap "" 0 rm $VERBOSE -f /tmp/$b.lock find $deltamir/pool \ \( -name '*debdelta-fails' -or -name '*debdelta-too-big' \ -or -name '*debdelta' \) -mtime +80 -type f |\ xargs -r rm || true debdelta/contrib/debmirror_2.6_--trash_option.patch0000644000000000000000000000637612436652141017506 0ustar --- debmirror-2.6/debmirror 2009-12-19 22:21:28.000000000 +0100 +++ debmirror 2010-01-19 11:29:40.000000000 +0100 @@ -62,6 +62,7 @@ [--di-dist=foo[,bar,..] ...] [--di-arch=foo[,bar,..] ...] [--source|--nosource] [--i18n] [--getcontents] [--md5sums] [--ignore-missing-release] [--ignore-release-gpg] + [--trash=dir] [--ignore=regex] [--exclude=regex] [--include=regex] [--exclude-deb-section=regex] [--limit-priority=regex] [--timeout=seconds] [--max-batch=number] @@ -272,6 +273,10 @@ Don't fail if the F file is missing. If the file does exist, it is mirrored and verified, but any errors are ignored. +=item --trash=dir + +When cleaning up, instead of deleting, move files in this directory. + =item --ignore=regex Never delete any files whose filenames match the regex. May be used multiple times. @@ -520,6 +525,8 @@ use Digest::MD5; use Digest::SHA1; use LWP::UserAgent; +use File::Basename; +use File::Copy; # Yeah, I use too many global variables in this program. 
our $mirrordir; @@ -534,6 +541,7 @@ our $cleanup=0; our $post_cleanup=1; our $no_cleanup=0; +our $trash_dir=0; our $do_source=1; our $host="ftp.debian.org"; our $user="anonymous"; @@ -630,6 +638,7 @@ 'rsync-extra=s' => \@rsync_extra, 'cleanup' => \$cleanup, 'postcleanup' => \$post_cleanup, + 'trash=s' => \$trash_dir, 'nocleanup' => \$no_cleanup, 'ignore=s' => \@ignores, 'exclude=s' => \@excludes, @@ -1402,6 +1411,24 @@ exit; +sub delete_or_trash { + my $file = shift; + if ( ! $trash_dir ) { + say("deleting $file") if ($verbose); + if (! $dry_run) { + unlink $file or die "unlink $file: $!"; + } + } else { + say("trashing $file") if ($verbose); + if (! $dry_run) { + rename ($file, ($trash_dir . "/" . basename($file) )) or + { copy ($file, ($trash_dir . "/" . basename($file) )) and + unlink $file + } or die "cannot move or copy $file to trash: $!"; + } + } +} + sub print_dl_size { my $size=shift; my $unit; @@ -2255,10 +2282,7 @@ foreach my $file (sort keys %files) { next if (@di_dists && $file =~ m:installer-\w+/current/images/:); if ($files{$file} == 2 && -f $file) { - say("deleting $file") if ($verbose); - if (! $dry_run) { - unlink $file or die "unlink $file: $!"; - } + delete_or_trash($file) } } } else { @@ -2273,10 +2297,7 @@ next if (@di_dists && $file =~ m:installer-\w+/current/images/:); unless ((exists $files{$file} && $files{$file} != 2) or (defined($ignore) && $file=~/$ignore/o)) { - say("deleting $file") if ($verbose); - if (! $dry_run) { - unlink $file or die "unlink $file: $!"; - } + delete_or_trash($file) } } } @@ -2419,10 +2440,7 @@ $file=~s:^\./::; if (! exists $di_files{$image_dir} || ! exists $di_files{$image_dir}{$file}) { next if (exists $di_files{$image_dir} && $file eq "MD5SUMS"); - say("deleting $image_dir/$file") if ($verbose); - if (! 
$dry_run) { - unlink "$file" or die "unlink $image_dir/$file: $!\n"; - } + delete_or_trash($file); } } chdir("$mirrordir") or die "unable to chdir($tempdir): $!\n"; debdelta/contrib/dpkg-sig0000755000000000000000000016014012436652141012611 0ustar #!/usr/bin/perl -w # # dpkg-sig signs deb-files in a standard way # # (c) Andreas Barth 2003 # (c) Marc Brockschmidt 2004 # # This patched version is also compatible with signatures in 'debdelta' files, # with deb files where data.tar is compressed by bzip2 or lzma, # and with the file format of the '/usr/bin/ar' tool # (that adds a trailing "/" to filenames in the headers). # # (c) Andrea Mennucc 2009 # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA # # Upstream web site is http://dpkg-sig.turmzimmer.net/ =pod =head1 NAME B - Debian package archive (.deb) signature generation and verification tool =head1 DESCRIPTION This is the description of the source code, trying to help people to understand how B works. 
=head1 SYNOPSIS =cut use strict; use Getopt::Long; use Data::Dumper; use IPC::Open2; use IPC::Open3; use File::Temp qw(tempdir); use File::Copy qw(move); use File::Basename qw(dirname basename); $| = 1; #Global variables (used for configuration and/or command line stuff) my ($sign, $list, $verify, $verify_role, $verify_exact, $client, $cache_pass, $pass_file, $key, $maintainer, $maintainer_pr, $verbose, %config, $tempdir, %part_cache, $check_v2_sig, $check_v3_sig, $batch, $gpgoptions, $passphrase, $remote_dpkg_sig, %ssh_connections, $sign_changes, $get_hashes, $sign_hashes, $write_signature, $help, $DEBUG, $verify_pattern, $remote_ssh_port); my @configfiles = qw(/etc/devscripts.conf ~/.devscripts); my $DPKG_SIG_VERSION = 0.13; &process_cli_options(); #If wanted, print only help and exit: if ($help) { &help; exit; } #In client mode, we wait for commands and STDIN, we don't need the rest: if ($client) { print "Welcome. This is dpkg-sig in client mode. Protocol version 6\n"; &read_cmds(); exit; } #Only load Config::File if we're not in the client mode: eval { require Config::File; import Config::File qw(read_config_file); }; _die($@) if $@; $tempdir = tempdir("debsigs-ng.XXXXXX", CLEANUP => 1, TMPDIR => 1); &load_config(@configfiles); $remote_dpkg_sig ||= "dpkg-sig"; my @files = @ARGV; if ($sign_hashes || $write_signature) { for (@files) { unless (/\.dpkg-sig-hashes$/) { die _die("$_: Make sure all files were generated by dpkg-sig --get-hashes file\n"); } } for my $file (@files) { if ($sign_hashes) { print "Processing $file...\n"; sign_hashes($file); print "Signed hashes in $file...\n"; } else { print "Processing $file...\n"; my @done = write_signature($file); print "Added signature to $_\n" for (@done); } } exit; } if (grep { ! 
/(?:deb|debdelta|changes)$/ } @files) { die _die("We can only work on debs, debdeltas, or changes files."); } if ($sign) { if (length($sign) > 9) { die _die("The signing name '$sign' is too long."); } elsif ($sign !~ /^[a-z]+$/) { die _die("The signing name '$sign' is not valid. Please use only letters."); } } if ($verify_role) { $verify_pattern = "^_gpg".$verify_role."[0-9A-Z]?\$"; } elsif ($verify_exact) { $verify_pattern = "^_gpg$verify_exact"; } else { $verify_pattern = "^_gpg.+"; } #The main loop: for my $exp (@files) { my @globbed_files = glob_exp($exp); die _die("Cannot find $exp: no such file") if (! @globbed_files); for my $file (@globbed_files) { if (($file =~ /\.deb$/) || ($file =~ /\.debdelta$/)) { #Yay! That's easy! print "Processing $file...\n"; if ($sign) { sign_deb($sign, $file); print "Signed deb $file\n" if ! $batch; } elsif ($verify || $verify_role || $verify_exact) { my @verify_output = verify_deb($file, $verify_pattern); print @verify_output; exit 2 if grep { /^BADSIG/ } @verify_output; exit 3 if grep { /^UNKNOWNSIG/ } @verify_output; } elsif ($list) { for (get_deb_parts($file)) { print "$1\n" if ($_->[0] =~ /_gpg(.+)/); } } elsif ($get_hashes) { write_deb_info($get_hashes, $file); unlink "$file.dpkg-sig-hashes"; add_part_to_ar_archive ("$file.dpkg-sig-hashes", "deb\n$get_hashes\n$file ".get_file_md5sum($file)."\n" , "control"); add_part_to_ar_archive ("$file.dpkg-sig-hashes", _read_file("$tempdir/digests"), "deb0"); } } else { print "--- Processing changes file $file:\n"; my $changes_signed = 0; my (%new_debs, $sums_control_data, @deb_md5sums, $maintainer_from_changes); #Get default from *changes: unless ($maintainer || $key) { $maintainer_from_changes = 1; chomp($maintainer = `grep ^Changed-By: $file | cut -d " " -f 2-`); $maintainer = quotemeta($maintainer); } if ($get_hashes) { unlink "$file.dpkg-sig-hashes"; $sums_control_data = "changes $file\n$get_hashes\n"; } for my $deb (get_debs_from_changes($file, \$changes_signed)) { print 
"Processing $deb...\n"; if ($sign) { my $r = sign_deb($sign, $deb); $new_debs{$r->[2]} = $r; print "Signed deb $deb\n" if ! $batch; } elsif ($verify || $verify_role || $verify_exact) { my @verify_output = verify_deb($deb, $verify_pattern); print @verify_output; exit 2 if grep { /^BADSIG/ } @verify_output; exit 3 if grep { /^UNKNOWNSIG/ } @verify_output; } elsif ($list) { for (get_deb_parts($deb)) { print "$1\n" if ($_->[0] =~ /_gpg(.+)/); } } elsif ($get_hashes) { $sums_control_data .= $deb." ".get_file_md5sum($deb)."\n"; write_deb_info($get_hashes, $deb); push @deb_md5sums, _read_file("$tempdir/digests"); } } if ($sign) { correct_changes_file($file, \%new_debs); sign_control_files($file) if ($sign_changes ne "no" && ! ($sign_changes eq "auto" && ! $changes_signed)); } elsif ($get_hashes) { add_part_to_ar_archive ("$file.dpkg-sig-hashes", $sums_control_data , "control"); for (my $i=0; $i<@deb_md5sums; $i++) { add_part_to_ar_archive ("$file.dpkg-sig-hashes", $deb_md5sums[$i], "deb$i"); } } undef $maintainer if ($maintainer_from_changes); } } } #Clean our ssh connections: for (values %ssh_connections) { my ($pid, $readerfh, $writerfh) = @$_; print $writerfh "quit\n"; sleep 1; kill $pid; } exit; =pod =head2 I<\@file_info> = sign_deb (I<$signing_role>, I<$file>) Does everything needed to add a signature to I<$file>: =over 4 =item * Verifies existing signatures =item * Creates the meta-data that is actually signed =item * Calls gpg to sign the meta-data. =item * Adds the signature to I<$file> =back Returns a reference to an array containing the new md5sum, the new size and the name of the signed deb. 
=cut

sub sign_deb {
    my ($sig_name, $file) = @_;

    # Refuse to sign if any existing _gpg* signature member fails verification.
    my @verify = verify_deb($file, "^_gpg.+");
    if (grep { /^BADSIG/ } @verify) {
        print STDERR "Can't sign $file, some signatures are invalid:\n".(join "", grep { /^BADSIG/ } @verify)."\n";
        exit 2;
    }

    # This also chooses the right sig name:
    $sig_name = write_deb_info($sig_name, $file);
    sign_file("$tempdir/digests", "$tempdir/digests.asc", "no_detach");

    # Read the clearsigned digest block back and append it to the deb:
    my $sig = _read_file($tempdir."/digests.asc");
    return add_sig_to_deb($file, $sig, $sig_name);
}

=pod

=head2 I<$signature_name> = write_deb_info (I<$signing_role>, I<$file>)

Creates a digests.asc file with the meta-data of I<$file> in dpkg-sig's tempdir:

=over 4

=item * Gets the needed information from I<$file>

=item * Chooses the name of the signature

=item * Writes a file in a RFC822-like format containing the meta-data

=back

Returns the name that should be used to add the file to the deb.

=cut

sub write_deb_info {
    my ($sig_name, $deb) = @_;

    # Get digests:
    my $digests = get_deb_digests($deb);

    # Get name for our new signature part of the archive:
    $sig_name = get_sig_name($sig_name, $digests, $deb);

    # Create digests file:
    unlink($tempdir."/digests.asc");
    my $signer_name = $key || $maintainer;
    if ($signer_name =~ /^(0x)?[0-9A-F]{8}$/i) {
        # A bare key id was given; resolve it to the first uid gpg reports.
        my $uidline = (grep /^uid:/, qx/gpg --list-keys --with-colons --fixed-list-mode $signer_name/)[0];
        $signer_name = (split /:/, $uidline)[9] if defined $uidline;
        chomp($signer_name);
    }
    my @data;
    push @data, "Version: 4\n";
    push @data, "Signer: $signer_name\n";
    push @data, "Date: " . localtime() . "\n";
    push @data, "Role: $1\n" if $sig_name =~ /^_gpg(\S+?)[A-Z0-9]?$/;
    push @data, "Files: \n";
    for my $part_info (@$digests) {
        push @data, "\t" . join (" ", reverse @$part_info) . "\n";
    }
    _write_file("$tempdir/digests", @data);
    return $sig_name;
}

=pod

=head2 sign_hashes (I<$file>)

Signs a .dpkg-sig-hashes I<$file> containing the digests of a deb/changes file:

=over 4

=item * Checks the .dpkg-sig-hashes file to see if it really was created by us

=item * Creates a new archive, containing the old control file

=item * Signs the digests and adds the clearsigned data to the new archive

=item * Substitutes the old file by the new, signed one.

=back

=cut

sub sign_hashes {
    my ($file) = @_;

    unlink($tempdir."/digests");
    unlink($tempdir."/digests.asc");
    unlink($tempdir."/hashes.signed");

    # We don't need the control data, we just want to check if this is a real
    # dpkg-sig generated hashes archive:
    my $control = get_archive_part($file, "control");
    if ($control !~ /^(deb|changes)/) {
        die _die("$file seems not to be a dpkg-sig hash archive");
    }
    add_part_to_ar_archive($tempdir."/hashes.signed", $control, "control");

    # Now sign all hashes:
    my $num = 0;
    for (get_deb_parts($file)) {
        my $part_name = $_->[0];
        if ($part_name !~ /^(deb\d+|control)$/) {
            print STDERR "W: $file contains $part_name, which shouldn't happen in dpkg-sig hash archive\n";
        } elsif ($part_name =~ /^deb\d+/) {
            my $data = get_archive_part($file, $part_name);
            if ($data =~ /-----BEGIN PGP SIGNATURE-----/) {
                die _die("$file seems to be already signed!\n");
            }
            _write_file($tempdir."/digests", $data);
            sign_file("$tempdir/digests", "$tempdir/digests.asc", "no_detach");
            my $s_data = _read_file($tempdir."/digests.asc");
            add_part_to_ar_archive($tempdir."/hashes.signed", $s_data, "deb".$num++);
        }
    }
    move($tempdir."/hashes.signed", $file);
}

=pod

=head2 I<@changed_files> = write_signature (I<$file>)

Adds the signatures from a signed .dpkg-sig-hashes I<$file> to the signed debs:

=over 4

=item * Checks the .dpkg-sig-hashes file to see if it really was created by us

=item * Tries to find out where we find the debs that have sigs in the .dpkg-sig-hashes

=item * Checks if the debs were changed since they were signed

=item * Adds signatures from the .dpkg-sig-hashes file to the debs

=item * If needed, it corrects the changes file to reflect the new sizes/md5sums of the debs

=back

Returns the pathes of the debs that were changed.

=cut

sub write_signature {
    my ($file) = @_;
    my @done;

    unlink($tempdir."/digests");
    unlink($tempdir."/digests.asc");
    unlink($tempdir."/hashes.signed");

    # Get control data:
    my @control = split (/\n/, get_archive_part($file, "control"));
    if ($control[0] !~ /^(deb|changes)/) {
        die _die("$file seems not to be a dpkg-sig hash archive");
    }
    chomp(my $sig_name = $control[1]);

    my ($num, %new_debs) = (0);
    for (get_ar_parts($file)) {
        my $part_name = $_->[0];
        if ($part_name !~ /^(deb\d+|control)$/) {
            print STDERR "W: $file contains $part_name, which shouldn't happen in dpkg-sig hash archive\n";
        } elsif ($part_name =~ /^deb\d+/) {
            my $sig = get_archive_part($file, $part_name);
            if ($sig !~ /-----BEGIN PGP SIGNATURE-----/) {
                die _die("$file seems to be unsigned!\n");
            }
            # deb$num is the detached sig for the deb named in control line $num + 1.
            # Get the name and the md5sum:
            my ($name, $md5sum) = split / /, $control[$num + 2];
            my $path;
            # Try to find the deb in this dir:
            if (file_readable(basename($name)) && get_file_md5sum(basename($name)) eq $md5sum) {
                $path = basename($name);
            # Now try the path in the hashes file:
            } elsif (file_readable($name) && get_file_md5sum($name) eq $md5sum) {
                $path = $name;
            # Wrong md5sum:
            } elsif (! (get_file_md5sum(basename($name)) eq $md5sum || get_file_md5sum($name) eq $md5sum)) {
                die _die("The md5sum for $name is wrong. Please use an archive of signed hashes of the version of the file existing now.");
            # We don't find the damn file!
            } else {
                die _die("Can't find $name. Please start dpkg-sig either in the dir with the debs to sign or in the dir where you got the hashes.");
            }
            push @done, $path;
            $sig_name = get_sig_name($sig_name, [get_deb_parts($path)], $path);
            my $r = add_sig_to_deb($path, $sig, $sig_name);
            $new_debs{$r->[2]} = $r;
            $num++;
        }
    }

    # Correct the .changes file (if any) so it lists the new sizes/md5sums:
    if ($control[0] =~ /^changes (.+)$/) {
        if (file_readable(basename($1))) {
            correct_changes_file(basename($1), \%new_debs);
            print "Corrected changes file ".basename($1)."\n";
        } elsif (file_readable($1)) {
            correct_changes_file($1, \%new_debs);
            print "Corrected changes file $1\n";
        } else {
            print STDERR "Can't find changes file $1, so won't correct it.\n";
        }
    }
    return @done;
}

=pod

=head2 I<@output> = verify_deb (I<$deb>, I<$verify_pattern>)

Verifies all signatures in I<$deb> with names matching I<$verify_pattern>:

=over 4

=item * Gets the digests of all parts of I<$deb>.

=item * Skips all signatures that don't match I<$verify_pattern>.

=item * Writes the signatures to $tempdir/digests.asc.

=item * Calls a function to check if $tempdir/digests.asc is valid in the v4 format, then tries v3 and v2.

=back

Returns its output. This is needed to achieve a "silent" verification when signing a deb.
=cut

sub verify_deb {
    my ($deb, $verify_pattern) = @_;
    my @return;

    # Get MD5 sums:
    my $digests = get_deb_digests($deb);

    for (my $n=0; $n<@$digests; $n++) {
        my ($part_name, $size, $sha1sum, $md5sum) = @{@$digests[$n]};
        next if $part_name !~ /$verify_pattern/;
        unlink($tempdir."/digests.asc");
        unlink($tempdir."/digests");
        my $sig = get_archive_part($deb, $part_name);
        _write_file($tempdir."/digests.asc", $sig);

        my ($status, @info);
        # Try v4 (clearsigned) first, then fall back to v3 and v2 if enabled:
        if ($sig =~ /BEGIN PGP SIGNED MESSAGE/) {
            $status = verify_deb_sig_v4($part_name, $n, $digests, \@info, \@return);
        }
        if ($check_v3_sig && (!$status || $status eq "BAD")) {
            $status = verify_deb_sig_v3($part_name, $n, $digests, \@info, \@return);
        }
        if ($check_v2_sig && (!$status || $status eq "BAD")) {
            $status = verify_deb_sig_v2($part_name, $n, $digests, \@info, \@return);
        }
        if ($status && $status eq "GOOD") {
            push @return, "GOODSIG $part_name $info[0] $info[2]\n";
        } elsif ($status && $status eq "UNKNOWN" && (! $batch || $batch >= 2)) {
            push @return, "UNKNOWNSIG $part_name $info[0]\n";
        } else {
            push @return, "BADSIG $part_name\n"
        }
    }
    return @return;
}

=pod

=head2 I<$verification_status> = verify_deb_sig_v4 (I<$part_name>, I<$part_number>, I<\@digests>, I<\@info>, I<\@return>)

Verifies if $tempdir/digests is a valid (version 4) signature for the deb described with I<\@digests>:

=over 4

=item * Calls gpg to verify the OpenPGP signature in $tempdir/digests.asc itself.

=item * Parses the signature to get the digests that were actually signed

=item * Compare the digests of the deb and those extracted from the signature to see if the deb was changed.

=item * Check that the name in the ar archive matches the "Role" field in the signature.

=item * DON'T check the Signer- and Date-Fiels.

=item * Check that at least the digests for control.tar.gz, data.tar.gz and debian-binary were signed.

=back

Returns if the the signature is good, by an unknown key, or bad.

=cut

sub verify_deb_sig_v4 {
    my ($part_name, $part_number, $digests, $info, $return) = @_;

    # Check signature:
    my @cmdline = qw(gpg --openpgp --decrypt --no-auto-check-trustdb --batch --no-tty --status-fd 1 2>&1);
    push @cmdline, "--output", "$tempdir/digests", "$tempdir/digests.asc";
    my $res = qx/@cmdline/;
    @$info = split(/ /, $1 ) if $res =~ /^\[GNUPG:\] VALIDSIG (.*)$/m;
    if ($res =~ /^\[GNUPG:\] NO_PUBKEY \S{8}(\S{8})/m) {
        $info->[0] = $1;
        return "UNKNOWN";
    }
    return "FORCE_BAD" unless $res =~ /^\[GNUPG:\] GOOD/m;

    # Now find out if the deb contains the data that was signed.
    # Parse the RFC822-like payload gpg wrote to $tempdir/digests.
    # BUGFIX: the readline operator <FH> had been stripped from this loop.
    open (FH, "<", "$tempdir/digests") || die "Can't open $tempdir/digests: $!";
    my (%data, $field_name);
    while (<FH>) {
        if (/^(\S+):\s*(.*)$/) {
            $field_name = lc($1);
            $data{$field_name} = $2 || "";
        } elsif (/^\s+(.+)$/ && $field_name) {
            # Continuation line of the previous field:
            $data{$field_name} .= ($data{$field_name} ? "\n" : "") . $1;
        }
    }
    close FH;

    if ($data{version} > 4) {
        push @$return, "$part_name: v$data{version} signature, dpkg-sig is too old to check it.\n";
        return "FORCE_BAD";
    }

    # For each file (and hashes) listed in the signature, check the computed
    # hashes in the archive:
    for my $file_info (split /\n/, $data{files}) {
        my ($md5sum, $sha1sum, $size, $name) = split /\s+/, $file_info;
        my $checked_something = 0;
        for my $member_info (@$digests) {
            if ($member_info->[0] eq $name) {
                $checked_something = 1;
                if ($member_info->[1] ne $size) {
                    push @$return, "$part_name: ${name}'s size differs from signed size.\n" if $verbose;
                    return "FORCE_BAD";
                } elsif ($member_info->[2] ne $sha1sum) {
                    # BUGFIX: message said "signed size" for the sha1sum check (copy-paste).
                    push @$return, "$part_name: ${name}'s sha1sum differs from signed sha1sum.\n" if $verbose;
                    return "FORCE_BAD";
                } elsif ($member_info->[3] ne $md5sum) {
                    # BUGFIX: message said "signed size" for the md5sum check (copy-paste).
                    push @$return, "$part_name: ${name}'s md5sum differs from signed md5sum.\n" if $verbose;
                    return "FORCE_BAD";
                }
            }
        }
        unless ($checked_something) {
            push @$return, "$part_name: ${name} signed, but not in the deb.\n" if $verbose;
            return "FORCE_BAD";
        }
    }

    # For each file contained in the archive, check that it is listed in the
    # signature:
    for my $member_info (@$digests) {
        my $part_present = 0;
        if ($member_info->[0] =~ /^_gpg/) {
            # ignore any member of the archive whose name starts with _gpg
        } else {
            for my $file_info (split /\n/, $data{files}) {
                my ($md5sum, $sha1sum, $size, $name) = split /\s+/, $file_info;
                if ($member_info->[0] eq $name) {
                    $part_present = 1;
                }
            }
            unless ($part_present) {
                push @$return, "$part_name: " . $member_info->[0] . " in deb, but not signed.\n" if $verbose;
                return "FORCE_BAD";
            }
        }
    }

    # The role encoded in the member name must match the signed Role field:
    $part_name =~ /^_gpg(\S+?)[A-Z0-9]?$/;
    if (($data{role} ne $1) && (($data{role} . "/") ne $1)) {
        push @$return, "$part_name: signature name and signed role differ.\n" if $verbose;
        return "FORCE_BAD";
    }
    return "GOOD";
}

=pod

=head2 I<$verification_status> = verify_deb_sig_v3 (I<$part_name>, I<$part_number>, I<\@digests>, I<\@info>, I<\@return>)

Verifies if $tempdir/digests is a valid (version 3) signature for the deb described with I<\@digests>:

=over 4

=item * Creates a file in $tempdir/digests that contains the signing role and the digests from the current deb.

=item * Calls gpg to verify that the detached OpenPGP signature in $tempdir/digests.asc is valid for $tempdir/digests.

=back

Returns if the the signature is good, by an unknown key, or bad.

=cut

sub verify_deb_sig_v3 {
    my ($part_name, $part_number, $digests, $info, $return) = @_;

    my @cmdline = qw(gpg --openpgp --verify --no-auto-check-trustdb --batch --no-tty --status-fd 1 2>&1);
    push @cmdline, "$tempdir/digests.asc", "$tempdir/digests";
    push @$return, "$part_name: Invalid v4 sig ... Trying v3\n" if $verbose;

    # v3 signed data: the role, then "md5sum name" lines for all members that
    # precede the signature member in the archive.
    $part_name =~ s/^_gpg(\S+?)[A-Z0-9]?$/$1/;
    open (FH, ">", $tempdir."/digests") || die _die("Couldn't open $tempdir/digests: $!");
    print FH $part_name, "\n";
    print FH join "\n", map { $_->[3] . " " . $_->[0] } @$digests[0..$part_number-1];
    print FH "\n";
    close FH;

    my $res = qx/@cmdline/;
    @$info = split(/ /, $1 ) if $res =~ /^\[GNUPG:\] VALIDSIG (.*)$/m;
    if ($res =~ /^\[GNUPG:\] NO_PUBKEY \S{8}(\S{8})/m) {
        $info->[0] = $1;
        return "UNKNOWN";
    }
    return "BAD" unless $res =~ /^\[GNUPG:\] GOOD/m;
    return "GOOD";
}

=pod

=head2 I<$verification_status> = verify_deb_sig_v2 (I<$part_name>, I<$part_number>, I<\@digests>, I<\@info>, I<\@return>)

Verifies if $tempdir/digests is a valid (version 2) signature for the deb described with I<\@digests>:

=over 4

=item * Creates a file in $tempdir/digests that contains the digests from the current deb.

=item * Calls gpg to verify that the detached OpenPGP signature in $tempdir/digests.asc is valid for $tempdir/digests.

=back

Returns if the the signature is good, by an unknown key, or bad.

=cut

sub verify_deb_sig_v2 {
    my ($part_name, $part_number, $digests, $info, $return) = @_;

    my @cmdline = qw(gpg --openpgp --verify --no-auto-check-trustdb --batch --no-tty --status-fd 1 2>&1);
    push @cmdline, "$tempdir/digests.asc", "$tempdir/digests";
    push @$return, "$part_name: Invalid v3 sig ... Trying v2\n" if $verbose;

    # v2 signed data: just the "md5sum name" lines, no role.
    open (FH, ">", $tempdir."/digests") || die _die("Couldn't open $tempdir/digests: $!");
    print FH join "\n", map { $_->[3] . " " . $_->[0] } @$digests[0..$part_number-1];
    print FH "\n";
    close FH;

    my $res = qx/@cmdline/;
    @$info = split(/ /, $1 ) if $res =~ /^\[GNUPG:\] VALIDSIG (.*)$/m;
    if ($res =~ /^\[GNUPG:\] NO_PUBKEY \S{8}(\S{8})/m) {
        $info->[0] = $1;
        return "UNKNOWN";
    }
    return "BAD" unless $res =~ /^\[GNUPG:\] GOOD/m;
    return "GOOD";
}

=pod

=head2 I<$sig_name> = get_sig_name (I<$sig_name>, I<\@parts>, I<$deb>)

Tries to find a filename for the signature. Receives the role and constructs a
name not already present in I<$deb>. Returns the final name or dies if it
wasn't possible to construct a name.
=cut

sub get_sig_name {
    my ($sig_name, $parts, $deb) = @_;

    $sig_name = "_gpg".$sig_name;
    if (grep { $_->[0] eq $sig_name } @$parts) {
        # Name already taken: append the first free suffix out of 0-9, A-Z.
        my $changed = 0;
        for my $ext (0..9, "A" .. "Z") {
            if (! grep { $_->[0] eq $sig_name.$ext} @$parts) {
                $sig_name .= $ext;
                ++$changed;
                last;
            }
        }
        die _die("$deb: Couldn't get a name for the signature part") if ! $changed;
    }
    return $sig_name;
}

=pod

=head2 correct_changes_file (I<$changes>, I<\%new_deb_info>)

Receives a path to a changes file I<$changes> and a hash reference
I<\%new_deb_info> containing new sizes and md5sums of debs in that changes
file. It'll parse the changes file, replace the old values by the new ones.
If the file is signed, the signature will be stripped (as it would be invalid
anyway).

=cut

sub correct_changes_file {
    my ($changes, $new_deb_info) = @_;

    if ($changes =~ m!^ssh://!) {
        # Remote file: delegate to the dpkg-sig instance on the other host.
        my ($user, $host, $file) = split_ssh_uri($changes);
        my ($readerfh, $writerfh, $prot_version) = get_ssh_connection($user, $host);
        print $writerfh "correct_changes_file $file\n";
        my ($response, $t);
        $response = '';
        do { read($readerfh, $t, 1); $response .= $t } while ($t ne "\n");
        chomp($response);
        if ($response =~ /^300 /) {
            for (keys %$new_deb_info) {
                print $writerfh join (" ", @{$new_deb_info->{$_}}), "\n";
            }
            print $writerfh ".\n";
            $response = '';
            do { read($readerfh, $t, 1); $response .= $t } while ($t ne "\n");
            chomp($response);
            if ($response !~ /^200 /) {
                die _die("remote dpkg-sig on $host returned \"$response\"");
            }
        } else {
            die _die("remote dpkg-sig on $host seems to be weird. Can't parse \"$response\"");
        }
    } else {
        my ($new_changes, $in_files) = ('', 0);
        open (CHANGES, "+<", $changes) || die _die("$changes: Can't open file: $!");
        # BUGFIX: both readline operators <CHANGES> had been stripped below.
        while (<CHANGES>) {
            if (/^-----BEGIN PGP SIGNED MESSAGE-----$/) {
                # Strip the clearsign header: skip up to the first blank line.
                while (<CHANGES>) { last if /^\s*$/ };
                next
            }
            if ($in_files) {
                chomp;
                last if ! s/^ //;
                my ($md5sum, $size, $section, $priority, $file_name) = split / /, $_;
                if ($new_deb_info->{$file_name}) {
                    $md5sum = $new_deb_info->{$file_name}->[0];
                    $size = $new_deb_info->{$file_name}->[1];
                    chomp($md5sum);
                }
                $new_changes .= " " . join (" ", ($md5sum, $size, $section, $priority, $file_name)). "\n";
            } else {
                $new_changes .= $_;
            }
            $in_files = "yes" if /^Files:/;
        }
        seek(CHANGES, 0, 0) || die _die("$changes: Can't rewind file: $!");
        truncate(CHANGES, 0) || die _die("$changes: Can't truncate file: $!");
        print CHANGES $new_changes;
        close CHANGES;
    }
}

=pod

=head2 I<\@new_file_info> = add_part_to_ar_archive (I<$file>, I<$new_data>, I<$new_name>)

=head2 I<\@new_file_info> = add_sig_to_deb (I<$file>, I<$new_data>, I<$new_name>)

Adds I<$new_data> to I<$file> as new ar archive part, using $new_name as
filename. If I<$file> doesn't exist, a new ar archive is created. Returns the
new md5sum and size of I<$file>.

=cut

sub add_part_to_ar_archive {
    return add_sig_to_deb(@_);
}

sub add_sig_to_deb {
    my ($deb, $sig, $sig_name) = @_;
    my ($new_md5sum, $new_file_size);

    if ($deb =~ m!^ssh://!) {
        # Remote deb: stream the signature to the remote dpkg-sig.
        my ($user, $host, $file) = split_ssh_uri($deb);
        my ($readerfh, $writerfh, $prot_version) = get_ssh_connection($user, $host);
        print $writerfh "add_sig_to_deb $sig_name $file\n";
        my ($response, $t);
        $response = '';
        do { read($readerfh, $t, 1); $response .= $t } while ($t ne "\n");
        chomp($response);
        if ($response =~ /^300 /) {
            for (split /\n/, $sig) {
                s/^\./../g;    # dot-stuffing, like SMTP
                print $writerfh $_, "\n";
            }
            print $writerfh ".\n";
            $response = '';
            do { read($readerfh, $t, 1); $response .= $t } while ($t ne "\n");
            chomp($response);
            if ($response !~ /^200 /) {
                die _die("remote dpkg-sig on $host returned \"$response\"");
            } else {
                $response = '';
                do { read($readerfh, $t, 1); $response .= $t } while ($t ne "\n");
                chomp($response);
                ($new_md5sum, $new_file_size) = split (/ /, $response);
            }
        } else {
            die _die("remote dpkg-sig on $host seems to be weird. Can't parse \"$response\"");
        }
    } else {
        # ar member names are limited to 16 bytes; we keep 2 spare for suffixes.
        die _die("$deb: Arch member name $sig_name too long!") if (length($sig_name) > 14);
        # Build a raw ar member header + payload, padded to 2-byte alignment:
        my $new_part = sprintf("%-16s%-12s%-6s%-6s%-8s%-10s`\n%s", $sig_name, time, 0, 0, 100644, length($sig), $sig . (length($sig)%2 ? "\n":""));
        if (!stat($deb)) {
            open (DEB, ">", (glob $deb)[0]) || die _die("Couldn't open ".(glob $deb)[0].": $!");
            # BUGFIX: the ar global magic is the 8 bytes "!<arch>\n"; the
            # "<arch>" token had been stripped from the source.
            print DEB "!<arch>\n";
        } else {
            open (DEB, ">>", (glob $deb)[0]) || die _die("Couldn't open ".(glob $deb)[0].": $!");
        }
        # BUGFIX: was "print DEB $new_part || die ..." — '||' binds to
        # $new_part, so the die was unreachable; use low-precedence 'or'.
        print DEB $new_part or die _die("Couldn't write to $deb: $!");
        close DEB;
        $new_md5sum = get_file_md5sum($deb);
        $new_file_size = (stat($deb))[7];
    }
    return [$new_md5sum, $new_file_size, basename($deb)];
}

=pod

=head2 I<@parts> = get_ar_parts (I<$file>)

=head2 I<@parts> = get_deb_parts (I<$file>)

Parses I<$file> as ar archive and returns all filenames included in the archive.

=cut

# BUGFIX: this forwarded to the nonexistent get_deb_part() (missing 's'),
# which made every get_ar_parts() caller die at runtime.
sub get_ar_parts { get_deb_parts(@_) }

sub get_deb_parts {
    my ($deb) = shift;
    my @parts;

    if ($deb =~ m!^ssh://!) {
        my ($user, $host, $file) = split_ssh_uri($deb);
        my ($readerfh, $writerfh, $prot_version) = get_ssh_connection($user, $host);
        print $writerfh "get_deb_parts $file\n";
        my $response = <$readerfh>;
        chomp($response);
        if ($response !~ /^200 /) {
            die _die("remote dpkg-sig on $host returned \"$response\"");
        } else {
            while (<$readerfh>) {
                last if (/^\.$/);
                s/^\.\././;
                chomp;
                push @parts, [$_];
            }
        }
    } else {
        open(DEB, "<", (glob $deb)[0]) || die _die("Couldn't open $deb: $!");
        # BUGFIX: the magic is "!<arch>\n" (8 bytes, matching the read above);
        # the stripped "!\n" could never compare equal to 8 read bytes.
        if (read(DEB, $_, 8) != 8) {
            die _die("Couldn't open $deb: ar format b0rken [Couldn't read first 8 bytes]");
        } elsif ($_ ne "!<arch>\n") {
            die _die("Couldn't open $deb: ar format b0rken");
        }
        do {
            # BUGFIX: restored stripped readline operator <DEB>.
            my $line = <DEB>;
            if ($line =~ /\S/) { #This should help with additional newlines
                #debian-binary   1075243548  0     0     100644  4         `
                my $name = substr($line, 0, 16);
                $name =~ s/\s*//g;
                $name =~ s/\/$//; # the 'ar' tool adds a / at the end, this is compatible with dpkg-deb
                my $length = substr($line, 48, 10);
                $length =~ s/\s*//g;
                next if (!$name && $length && $length =~ /^\d+\s*$/);
                seek (DEB, $length, 1) or die _die("Couldn't read $name in $deb: File too short!");
                if ($length % 2) {
                    # ar pads every member to an even offset:
                    seek (DEB, 1, 1) or die _die("Couldn't read $name in $deb: File too short!");
                }
                push @parts, [$name];
            }
        } while (!eof(DEB));
        close DEB;
    }
    return @parts;
}

=pod

=head2 I<@debs> = get_debs_from_changes (I<$file>, I<\$changes_signed>)

Parses I<$file> as Debian .changes file and returns all listed debs. The
dirname of I<$file> is prepended to the debs, which means that the returned
URIs should exist. If I<$file> is signed, I<$changes_signed> is set to "yes".

=cut

sub get_debs_from_changes {
    my ($changes, $changes_signed) = @_;
    my $changes_path = dirname($changes);
    my @debs;

    if ($changes =~ m!^ssh://!) {
        my ($user, $host, $file) = split_ssh_uri($changes);
        my ($readerfh, $writerfh, $prot_version) = get_ssh_connection($user, $host);
        print $writerfh "get_debs_from_changes $file\n";
        my $response = <$readerfh>;
        chomp($response);
        if ($response !~ /^200 /) {
            die _die("remote dpkg-sig on $host returned \"$response\"");
        } else {
            $$changes_signed = "yes" if $response =~ /^200 ok debs in signed/;
            while (<$readerfh>) {
                last if (/^\.$/);
                s/^\.\././;
                chomp;
                if (defined ($user)) {
                    push @debs, "ssh://$user\@$host:$_";
                } else {
                    push @debs, "ssh://$host:$_";
                }
            }
        }
    } else {
        open (CHANGES, "<", $changes) || die _die("$changes: Can't open file: $!");
        # BUGFIX: both readline operators <CHANGES> had been stripped below.
        # Skip to the Files: section, noting whether the file is clearsigned:
        while (<CHANGES>) {
            $$changes_signed = "yes" if /-----BEGIN PGP SIGNED MESSAGE-----/;
            last if /^Files:/
        }
        while (<CHANGES>) {
            chomp;
            if (/^ [^ ]+ \d+ [^ ]+ [^ ]+ (.+)$/) {
                push @debs, $changes_path."/".$1 if $1 =~ /^(.+\.deb)$/;
            } elsif (/^\s*$/) {
                last;
            } else {
                print STDERR "$changes corrupted\n";
            }
        }
        close CHANGES;
    }
    return @debs;
}

=pod

=head2 I<\@digests> = get_deb_digests (I<$deb>)

Parses I<$deb> and returns the meta-data of the included files. The read data
is piped to md5sums and sha1sums, which create the respective digests.
The digests, the filename and the size are put in an anonymous array looking
like this: [B<$name>, B<$size>, B<$sha1sum>, B<$md5sum>]. One of these arrays
is pushed to I<@digests> for every file in I<$deb>.

=cut

sub get_deb_digests {
    my $deb = shift;
    my @digests;

    if ($deb =~ m!^ssh://!) {
        # Remote deb: ask the remote dpkg-sig for the digests.
        my ($user, $host, $file) = split_ssh_uri($deb);
        my ($readerfh, $writerfh, $prot_version) = get_ssh_connection($user, $host);
        print $writerfh "get_deb_digests $file\n";
        my $response = <$readerfh>;
        chomp($response);
        if ($response !~ /^200 /) {
            die _die("remote dpkg-sig on $host returned \"$response\"");
        } else {
            while (<$readerfh>) {
                last if (/^\.$/);
                s/^\.\././;
                chomp;
                my ($name, $size, $sha1sum, $md5sum) = split / /, $_;
                push @digests, [$name, $size, $sha1sum, $md5sum];
            }
        }
    } else {
        open(DEB, "<", (glob $deb)[0]) || die _die("Couldn't open $deb: $!");
        # BUGFIX: ar magic restored to the 8-byte "!<arch>\n" (was stripped).
        if (read(DEB, $_, 8) != 8) {
            die _die("Couldn't open $deb: ar format b0rken [Couldn't read first 8 bytes]");
        } elsif ($_ ne "!<arch>\n") {
            die _die("Couldn't open $deb: ar format b0rken");
        }
        do {
            # BUGFIX: restored stripped readline operator <DEB>.
            my $line = <DEB>;
            if ($line =~ /\S/) { #This should help with additional newlines
                my ($name, $size, $md5sum, $sha1sum);
                #debian-binary   1075243548  0     0     100644  4         `
                $name = substr($line, 0, 16);
                $name =~ s/\s*//g;
                $name =~ s/\/$//; # the 'ar' tool adds a / at the end, this is compatible with dpkg-deb
                my $length = substr($line, 48, 10);
                $length =~ s/\s*//g;
                next if ($name eq "" && $length && $length =~ /^\d+\s*$/);
                $size = $length;
                # Pipe the member data through md5sum and sha1sum in chunks:
                my ($part, $read_length, $md5sum_input, $md5sum_output, $sha1sum_input, $sha1sum_output);
                open2($md5sum_output, $md5sum_input, qq{md5sum});
                if (-x "/usr/bin/sha1sum") {
                    open2($sha1sum_output, $sha1sum_input, qq{sha1sum});
                } elsif (-x "/usr/bin/gpg") {
                    #We need this for woody *sigh*:
                    open2($sha1sum_output, $sha1sum_input, qq{gpg --print-md sha1 | tr 'A-Z' 'a-z' | sed 's/ //g'});
                } else {
                    die _die("Can't compute sha1sum, please install sha1sum or gpg");
                }
                do {
                    $read_length = ($length > 4096) ? 4096 : $length;
                    $length -= $read_length;
                    if (read (DEB, $part, $read_length) != $read_length) {
                        die _die("Couldn't read $name in $deb: File too short!");
                    }
                    # Only the last chunk can be odd-sized: skip the pad byte.
                    if ($read_length % 2 && read (DEB, $_, 1) != 1) {
                        die _die("Couldn't read $name in $deb: File too short!");
                    }
                    print $md5sum_input $part;
                    print $sha1sum_input $part;
                } while ($length > 0);
                close $md5sum_input;
                close $sha1sum_input;
                ($md5sum = <$md5sum_output>) =~ s/[\s\n\r-]//g;
                ($sha1sum = <$sha1sum_output>) =~ s/[\s\n\r-]//g;
                push @digests, [$name, $size, $sha1sum, $md5sum];
            }
        } while (!eof(DEB));
        close DEB;
    }
    return \@digests;
}

=pod

=head2 I<$md5sum> = get_file_md5sum (I<$file>)

Returns the md5sum for I<$file>.

=cut

sub get_file_md5sum {
    my $file = shift;
    my $md5sum;

    if ($file =~ m!^ssh://!) {
        my ($user, $host, $file) = split_ssh_uri($file);
        my ($readerfh, $writerfh, $prot_version) = get_ssh_connection($user, $host);
        if ($prot_version < 5) {
            die _die("remote dpkg-sig on $host is too old and can't return the needed md5sum of a file.");
        }
        print $writerfh "get_file_md5sum $file\n";
        my ($response, $t);
        $response = '';
        do { read($readerfh, $t, 1); $response .= $t } while ($t ne "\n");
        chomp($response);
        if ($response =~ /^200 ok md5sum is (\S+)/) {
            $md5sum = $1;
        } else {
            die _die("remote dpkg-sig on $host returned \"$response\"");
        }
    } else {
        # NOTE(review): $file is interpolated into a shell command unquoted —
        # shell metacharacters in a filename would break/abuse this.
        chomp ($md5sum = `md5sum $file | cut -d " " -f 1`);
    }
    return $md5sum;
}

=pod

=head2 I<$part_data> = get_archive_part (I<$archive>, I<$part_name>)

Returns the content of I<$part_name> in the ar archive I<$archive>.

=cut

sub get_archive_part {
    my ($deb, $part_name) = @_;
    my $part = '';

    if ($deb =~ m!^ssh://!) {
        my ($user, $host, $file) = split_ssh_uri($deb);
        my ($readerfh, $writerfh, $prot_version) = get_ssh_connection($user, $host);
        print $writerfh "get_archive_part $part_name $file\n";
        my $response = <$readerfh>;
        if ($response !~ /^200 /) {
            die _die("remote dpkg-sig on $host returned \"$response\"");
        } else {
            while (<$readerfh>) {
                last if (/^\.$/);
                s/^\.\././;
                chomp;
                $part .= "$_\n";
            }
        }
    } else {
        open(DEB, "<", $deb) || die _die("Couldn't open $deb: $!");
        # BUGFIX: ar magic restored to the 8-byte "!<arch>\n" (was stripped).
        if (read(DEB, $_, 8) != 8) {
            die _die("Couldn't open $deb: ar format b0rken [Couldn't read first 8 bytes]");
        } elsif ($_ ne "!<arch>\n") {
            die _die("Couldn't open $deb: ar format b0rken");
        }
        while (!eof(DEB)) {
            # BUGFIX: restored stripped readline operator <DEB>.
            my $line = <DEB>;
            #debian-binary   1075243548  0     0     100644  4         `
            my $name = substr($line, 0, 16);
            my $length = substr($line, 48, 10);
            next if (!$name && $length && $length =~ /^\d+\s*$/);
            my $tmp_part;
            if (read (DEB, $tmp_part, $length) != $length) {
                die _die("Couldn't read $name in $deb: File too short!");
            }
            if ($length % 2 && read (DEB, $_, 1) != 1) {
                die _die("Couldn't read $name in $deb: File too short!");
            }
            $name =~ s/\s*//g;
            $name =~ s/\/$//; # the 'ar' tool adds a / at the end, this is compatible with dpkg-deb
            if ($name eq $part_name) {
                $part = $tmp_part;
                last;
            }
        }
        close DEB;
    }
    return $part;
}

=pod

=head2 I<@file_data> = read_control_file (I<$file>)

Returns the content of I<$file> as array with one line per element.

=cut

sub read_control_file {
    my $file = shift;
    my @file_data;

    die _die("This only returns debian control files (ending with .changes or dsc)") if $file !~ /\.(?:dsc|changes)$/;

    if ($file =~ m!^ssh://!) {
        my ($user, $host, $file) = split_ssh_uri($file);
        my ($readerfh, $writerfh, $prot_version) = get_ssh_connection($user, $host);
        if ($prot_version < 3) {
            die _die("remote dpkg-sig on $host is too old and can't return the needed control file data.");
        }
        print $writerfh "read_control_file $file\n";
        my $response = <$readerfh>;
        chomp($response);
        if ($response !~ /^200 /) {
            die _die("remote dpkg-sig on $host returned \"$response\"");
        } else {
            while (<$readerfh>) {
                last if (/^\.$/);
                s/^\.\././;
                push @file_data, $_;
            }
        }
    } else {
        open (FH, $file) or die _die("Can't open $file: $!");
        # BUGFIX: restored stripped readline operator <FH>.
        @file_data = <FH>;
        close FH;
    }
    return @file_data;
}

=pod

=head2 I<@file_info> = write_control_file (I<$file>, I<\@data>)

Writes contents of I<\@data> to I<$file>. Returns new md5sum and size of I<$file>.

=cut

sub write_control_file {
    my ($file, $data) = @_;
    my ($response, $t, $new_md5sum, $new_file_size);

    die _die("This only writes debian control files (ending with .changes or dsc)") if $file !~ /\.(?:dsc|changes)$/;

    if ($file =~ m!^ssh://!) {
        my ($user, $host, $file) = split_ssh_uri($file);
        my ($readerfh, $writerfh, $prot_version) = get_ssh_connection($user, $host);
        if ($prot_version < 3) {
            die _die("remote dpkg-sig on $host is too old and can't return the needed control file data.");
        }
        print $writerfh "write_control_file $file\n";
        $response = '';
        do { read($readerfh, $t, 1); $response .= $t } while ($t ne "\n");
        chomp($response);
        if ($response =~ /^300 /) {
            print $writerfh @$data;
            print $writerfh ".\n";
            $response = '';
            do { read($readerfh, $t, 1); $response .= $t } while ($t ne "\n");
            chomp($response);
            if ($response =~ /^200 .+New md5sum, size: ([^ ]+) (\d+)/) {
                $new_md5sum = $1;
                $new_file_size = $2;
            } else {
                die _die("remote dpkg-sig on $host returned \"$response\"");
            }
        } else {
            die _die("remote dpkg-sig on $host seems to be weird. Can't parse \"$response\"");
        }
    } else {
        _write_file($file, @$data);
        chomp ($new_md5sum = `md5sum $file | cut -d " " -f 1`);
        $new_file_size = (stat($file))[7];
    }
    return ($new_md5sum, $new_file_size);
}

=pod

=head2 I<@files> = glob_exp (I<$exp>)

Returns the result of globbing I<$exp> as array.

=cut

sub glob_exp {
    my $exp = shift;
    my @files;

    if ($exp =~ m!^ssh://!) {
        my ($user, $host, $file) = split_ssh_uri($exp);
        my ($readerfh, $writerfh, $prot_version) = get_ssh_connection($user, $host);
        print $writerfh "glob_exp $file\n";
        my $response = <$readerfh>;
        if ($response !~ /^200 /) {
            die _die("remote dpkg-sig on $host returned \"$response\"");
        } else {
            while (<$readerfh>) {
                last if (/^\.$/);
                s/^\.\././;
                chomp;
                if (defined ($user)) {
                    push @files, "ssh://$user\@$host:$_";
                } else {
                    push @files, "ssh://$host:$_";
                }
            }
        }
    } else {
        push @files, glob($exp);
    }
    return @files;
}

=pod

=head2 I<$file_readable> = file_readable (I<$file>)

Returns a true value if I<$file> is readable.

=cut

sub file_readable {
    my $file = shift;

    if ($file =~ m!^ssh://!) {
        my ($user, $host, $file) = split_ssh_uri($file);
        my ($readerfh, $writerfh, $prot_version) = get_ssh_connection($user, $host);
        if ($prot_version < 4) {
            print "W: remote dpkg-sig on $host is too old and can't return the needed data. .dsc not signed";
            return 0;
        }
        print $writerfh "file_readable $file\n";
        my ($response, $t);
        $response = '';
        do { read($readerfh, $t, 1); $response .= $t } while ($t ne "\n");
        chomp($response);
        if ($response =~ /^200 /) {
            return 1;
        } elsif ($response =~ /^400 /) {
            return 0;
        } else {
            die _die("remote dpkg-sig on $host returned \"$response\"");
        }
    } else {
        return -r $file;
    }
}

=pod

=head2 I<@ssh_uri_parts> = split_ssh_uri (I<$uri>)

Splits an ssh URI $uri into a B<$user>, B<$host> and B<$path> part.

=cut

sub split_ssh_uri {
    my ($uri) = @_;
    my ($user, $host, $path);

    #ssh://$USER@$HOST:$PATH
    if ($uri =~ m!^ssh://(?:([^@\s]+)@)?(\S+):(.+)!) {
        ($user, $host, $path) = ($1, $2, $3);
        $user ||= undef;
        die _die("$uri: Please specify at least a host to connect to.") if !$host;
        die _die("$uri: Please specify a path on the remote host.") if !$path;
    } else {
        die _die("$uri is no ssh uri!");
    }
    return ($user, $host, $path);
}

=pod

=head2 I<@ssh_connection_info> = get_ssh_connection (I<$user>, I<$host>)

Opens a ssh connection to I<$host> as user I<$user>, directly calling B<ssh>.
It checks if the remote B<dpkg-sig> is compatible to the current version and
returns the B<$pid>, the Read-Filehandle B<$readerfh> and the Write-Filehandle
B<$writerfh>.

=cut

sub get_ssh_connection {
    my ($user, $host) = @_;
    my $connection_id = (defined ($user))?"$user\@$host":$host;

    $remote_ssh_port ||= "";
    $remote_ssh_port =~ s/^\s*(\d+)\s*$/-p $&/;

    # Connections are cached per user@host so we only spawn one ssh each:
    if (! $ssh_connections{$connection_id} ) {
        my ($readerfh, $writerfh);
        die _die("No ssh installed, we need it to connect to the remote host.") if (not `which ssh`);
        my $pid = open2($readerfh, $writerfh, qq{ssh $remote_ssh_port $connection_id '$remote_dpkg_sig --client $DEBUG 2>/dev/null || echo "No dpkg-sig available"' 2>&1});
        my $response = <$readerfh>;
        if ($response && $response !~ /protocol version/i) {
            if ($response && $response =~ /No dpkg-sig available/) {
                die _die("No $remote_dpkg_sig on remote host installed.");
            } else {
                if ($response) {
                    die _die("ssh returned $response");
                } else {
                    die _die("Some problem with the ssh connection $connection_id occured");
                }
            }
        }
        if ($response !~ /protocol version (\d+)$/i || $1 < 6) {
            die _die("dpkg-sig on $host is too old (we need protocol version 6)");
        }
        $ssh_connections{$connection_id} = [$pid, $readerfh, $writerfh, $1];
    }
    return (@{$ssh_connections{$connection_id}}[1,2,3]);
}

=pod

=head2 sign_control_files (I<$changes_file>)

This works like debsign:

=over 4

=item Checks if a .dsc exists.

=item If the .dsc should be signed, it tries to do so..

=item Writes the new .dsc with the new signature.

=item Reads I<$changes_file> and puts in the new size/md5sum of the .dsc.
=item Signs I<$changes_file> and write the signed copy back.

=back

=cut

sub sign_control_files {
    my $file = shift;
    my $sign_dsc = $sign_changes =~ /full$/ ? 1 : 0;
    my ($dsc, $new_dsc_md5sum, $new_dsc_size);
    $dsc = "$1.dsc" if ($file =~ /^(.+)_[^ _]+.changes/ && file_readable("$1.dsc"));

    #Clean the tempdir:
    unlink($tempdir."/dsc.unsigned");
    unlink($tempdir."/dsc.signed");
    unlink($tempdir."/changes.unsigned");
    unlink($tempdir."/changes.signed");

    if ($sign_dsc && $dsc) {
        open (DSC, ">", $tempdir."/dsc.unsigned") || die _die("Can't open $tempdir/dsc.unsigned: $!");
        my @data = read_control_file($dsc);
        for (my $i=0; $i<@data; $i++) {
            if ($data[$i] =~ /^-----BEGIN PGP SIGNED MESSAGE-----$/) {
                # The .dsc carries an old signature; decide whether to re-sign:
                if ($sign_changes eq "force_full") {
                    $sign_dsc = 1;
                } elsif (! $batch) {
                    print "The .dsc file is already signed.\nWould you like to use the current signature? [Yn] ";
                    # BUGFIX: restored stripped readline operator <STDIN>.
                    chomp(my $answer = lc(<STDIN>));
                    $sign_dsc = 0 unless ($answer eq "n" || $answer eq "no");
                } else {
                    $sign_dsc = 0;
                }
                # Skip the clearsign header up to the blank line:
                while(defined $data[$i]) { last if $data[$i++] =~ /^\s*$/ }
            } elsif ($data[$i] =~ /^\s*$/) {
                last;
            }
            print DSC $data[$i];
        }
        print DSC "\n";
        close DSC;
        if ($sign_dsc) {
            #Sign it:
            sign_file($tempdir."/dsc.unsigned", $tempdir."/dsc.signed", "no_detach") if $sign_dsc;
            #Read and write them to the fitting location:
            open (DSC, $tempdir."/dsc.signed") || die _die("Can't open $tempdir/dsc.signed: $!");
            # BUGFIX: restored stripped readline operator <DSC>.
            @data = <DSC>;
            close DSC;
            ($new_dsc_md5sum, $new_dsc_size) = write_control_file($dsc, \@data);
            print "Signed .dsc $dsc\n" unless $batch;
        }
    }

    #Now the changes file:
    open (CHANGES, ">", $tempdir."/changes.unsigned") || die _die("Can't open $tempdir/changes.unsigned: $!");
    my $basename_dsc = basename($dsc) if $dsc;
    for (read_control_file($file), "\n") {
        #If we've changed the .dsc file, we have to use the new values in the .changes:
        if ($basename_dsc && $new_dsc_md5sum && $new_dsc_size && $_ =~ /\Q$basename_dsc\E$/) {
            s/^ [^ ]+ \d+ (.+)$/ $new_dsc_md5sum $new_dsc_size $1/;
        }
        print CHANGES $_;
    }
    close CHANGES;
    sign_file($tempdir."/changes.unsigned", $tempdir."/changes.signed", "no_detach");
    my @data;
    open (CHANGES, $tempdir."/changes.signed") || die _die("Can't open $tempdir/changes.signed: $!");
    # BUGFIX: restored stripped readline operator <CHANGES>.
    @data = <CHANGES>;
    close CHANGES;
    write_control_file($file, \@data);
    print "Signed .changes $file\n" unless $batch;
}

=pod

=head2 sign_file (I<$in_file>, I<$out_file>, I<$no_detach>)

Signs I<$in_file> with gpg and puts the detached signature in I<$out_file>.
If I<$no_detach> is true, I<$out_file> is a clearsigned copy of I<$in_file>.

=cut

sub sign_file {
    my ($in_file, $out_file, $no_detach) = @_;

    my @cmdline = ("gpg", "--openpgp", "--armor", "--output", $out_file);
    if ($no_detach) {
        push @cmdline, "--clearsign";
    } else {
        push @cmdline, "--detach-sign";
    }
    if ($key) {
        push (@cmdline, "--default-key", "'$key'");
        print "Default key: $key\n" if $verbose;
    } elsif ($maintainer) {
        push (@cmdline, "--default-key", "'$maintainer'");
    }
    if ($pass_file) {
        # Feed the passphrase via fd 42 redirected from the pass file:
        push (@cmdline, "--no-tty", "--batch", "--passphrase-fd", "42", "42<$pass_file");
        print "Using passphrase from $pass_file\n" if $verbose;
    } elsif ($passphrase) {
        push (@cmdline, "--no-tty", "--batch", "--passphrase-fd", "0");
        print "Using cached passphrase\n" if $verbose;
    }
    push (@cmdline, $gpgoptions) if $gpgoptions;
    print "Signing $in_file with key ".($key || "of $maintainer")."\n" if $verbose;
    push (@cmdline, $in_file, "2>&1");
    open (GPG, "| ".join " ", @cmdline) || die _die("Signing failed: $!");
    print GPG $passphrase, "\n" if $passphrase;
    close GPG;
    die _die("Signing failed. Error code: $?") if $?;
}

# Client-side command loop: reads protocol commands from stdin and answers on
# stdout (used when dpkg-sig is started with --client over ssh).
sub read_cmds {
    $DEBUG && (open (LOG, ">", "/tmp/dpkg-sig.log") || die _die("Couldn't open log: $!"));
    $DEBUG && select LOG;
    $|=1;
    $DEBUG && select STDOUT;
    sub send { print STDOUT @_; $DEBUG && print LOG "Sent: ", @_; }
    # BUGFIX: restored stripped readline operator <STDIN>.
    sub read { $_ = <STDIN>; $DEBUG && print LOG "Received: ", $_; return $_ };
    while ($_ = &read()) {
        chomp;
        if (/^get_deb_digests (.+)$/) {
            my $r = eval { get_deb_digests ($1) };
            if ($@) {
                chomp($@); $@ =~ s/\n/\t/g;
                &send ("500 error: $@\n");
            } else {
                &send("200 ok digests for $1 follow\n");
                &send(join (" ", @$_), "\n") for @$r;
                &send(".\n");
            }
        } elsif (/^get_deb_parts (.+)$/) {
            my @r = eval { get_deb_parts ($1) };
            if ($@) {
                chomp($@); $@ =~ s/\n/\t/g;
                &send ("500 error: $@\n");
            } else {
                &send("200 ok parts of $1 follow\n");
                &send($_->[0] . "\n") for @r;
                &send(".\n");
            }
        } elsif (/^get_archive_part ([^ ]+) (.+)$/) {
            my $r = eval { get_archive_part ($2, $1) };
            if ($@) {
                chomp($@); $@ =~ s/\n/\t/g;
                &send("500 error: $@\n");
            } else {
                &send("200 ok part $1 of $2 follows\n");
                for (split (/\n/, $r)) { s/^\./../; &send("$_\n"); }
                &send(".\n");
            }
        } elsif (/^read_control_file (.+)$/) {
            my @r = eval { read_control_file ($1) };
            if ($@) {
                chomp($@); $@ =~ s/\n/\t/g;
                &send("500 error: $@\n");
            } else {
                &send("200 ok file $1 follows\n");
                for (@r) { s/^\./../; &send("$_"); }
                &send(".\n");
            }
        } elsif (/^get_debs_from_changes (.+)$/) {
            my $changes_signed = 0;
            my @r = eval { get_debs_from_changes ($1, \$changes_signed) };
            if ($@) {
                chomp($@); $@ =~ s/\n/\t/g;
                &send("500 error: $@\n");
            } else {
                if ($changes_signed) {
                    &send("200 ok debs in signed $1 follow\n");
                } else {
                    &send("200 ok debs in $1 follow\n");
                }
                for (@r) { s/^\./../; &send("$_\n"); }
                &send(".\n");
            }
        } elsif (/^glob_exp (.+)$/) {
            my @r = eval { glob_exp ($1) };
            if ($@) {
                chomp($@); $@ =~ s/\n/\t/g;
                &send("500 error: $@\n");
            } else {
                &send("200 ok files matching \"$1\" follow\n");
                for (@r) { s/^\./../; &send("$_\n"); }
                &send(".\n");
            }
        } elsif (/^file_readable (.+)$/) {
            my $r = eval {
file_readable ($1) }; if ($@) { chomp($@); $@ =~ s/\n/\t/g; &send("500 error: $@\n"); } else { if ($r) { &send("200 ok file readable\n"); } else { &send("400 not ok file not readable\n"); } } } elsif (/^get_file_md5sum (.+)$/) { my $r = eval { get_file_md5sum ($1) }; if ($@) { chomp($@); $@ =~ s/\n/\t/g; &send("500 error: $@\n"); } else { &send("200 ok md5sum is $r\n"); } } elsif (/^add_sig_to_deb ([^ ]+) (.+)$/) { my ($sig_name, $deb, $sig) = ($1, $2, ''); &send("300 ok waiting for data\n"); while ($_ = &read()) { last if (/^\.$/); s/^\.\././; $sig .= $_; } my $r = eval { add_sig_to_deb ($deb, $sig, $sig_name) }; if ($@) { chomp($@); $@ =~ s/\n/\t/g; &send("500 error: "); &send($@, "\n"); } else { &send("200 ok added sig to $deb. New data follows\n"); &send(join (" ", @$r), "\n"); } } elsif (/^correct_changes_file (.+)$/) { my ($changes, $new_changes_data) = ($1, {}); &send("300 ok waiting for data\n"); while ($_ = &read()) { last if (/^\.$/); s/^\.\././; chomp; my ($md5sum, $size, $name) = split (/ /, $_, 3); $new_changes_data->{$name} = [$md5sum, $size, $name]; } my $r = eval { correct_changes_file ($changes, $new_changes_data) }; if ($@) { chomp($@); $@ =~ s/\n/\t/g; &send("500 error: "); &send($@, "\n"); } else { &send("200 ok $changes corrected\n"); } } elsif (/^write_control_file (.+)$/) { my ($file, @data) = ($1, ()); &send("300 ok waiting for data\n"); while ($_ = &read()) { last if (/^\.$/); s/^\.\././; push @data, $_; } my @r = eval { write_control_file ($file, \@data) }; if ($@) { chomp($@); $@ =~ s/\n/\t/g; &send("500 error: "); &send($@, "\n"); } else { &send("200 ok $file written. 
New md5sum, size: $r[0] $r[1]\n"); } } elsif (/^quit\s*$/) { &send("200 ok Bye!\n"); exit; } else { &send("501 unknown command ".(split / /, $_)[0]."\n"); } } $DEBUG && close LOG; } sub process_cli_options { exit 1 unless GetOptions("sign|s=s" => \$sign, "list|l|t" => \$list, "verify|check|c" => \$verify, "verify-role=s" => \$verify_role, "verify-exact=s" => \$verify_exact, "get-hashes=s" => \$get_hashes, "sign-hashes" => \$sign_hashes, "write-signature" => \$write_signature, "client" => \$client, "help" => \$help, #Options: "default-key|k=s" => \$key, "cache-passphrase|p" => \$cache_pass, "passphrase-file|f=s" => \$pass_file, "m=s" => \$maintainer, "e=s" => \$maintainer_pr, "verbose|v+" => \$verbose, "also-v2-sig" => \$check_v2_sig, "also-v3-sig" => \$check_v2_sig, "sign-changes|a:s" => \$sign_changes, "batch:i" => \$batch, "gpg-options|g=s" => \$gpgoptions, "remote-dpkg-sig|r=s" => \$remote_dpkg_sig, "remote-ssh-port|o=i" => \$remote_ssh_port, ); $check_v2_sig = ($check_v2_sig && $check_v2_sig eq "false"?0:"yes"); $check_v3_sig = ($check_v3_sig && $check_v3_sig eq "false"?0:"yes"); die _die('Please use only one of --sign, --list, --verify[-role|-exact], --get-hashes, --write-signature, --help and --client!') if (! !$sign + ! !$list + ! !$verify + ! !$verify_role + ! !$verify_exact + ! !$client + ! !$get_hashes + ! !$sign_hashes + ! !$write_signature + ! ! $help> 1); $maintainer_pr && ($maintainer = $maintainer_pr); if (!$sign && !$list && !$verify && !$verify_role && !$verify_exact && !$client && !$get_hashes && !$sign_hashes && !$write_signature && !$help) { if (@ARGV) { $verify = 1; print "I: No action requested, verifying files.\n"; } else { $help = 1; } } } sub load_config { my @configfiles = @_; for my $configfile (@configfiles) { $configfile = (glob($configfile))[0]; if ($configfile && -r $configfile) { %config = %{read_config_file($configfile) || {}}; } } ($maintainer = ($config{'DEBSIGN_MAINT'} || "")) =~ s/^"(.+)"$/$1/ if ! 
$maintainer; ($key = ($config{'DPKGSIG_KEYID'} || $config{'DEBSIGN_KEYID'} || "")) =~ s/^"(.+)"$/$1/ if ! $key; ($cache_pass = ($config{'DPKGSIG_CACHE_PASS'} || "")) =~ s/^"(.+)"$/$1/ if ! $cache_pass; ($sign_changes=($config{'DPKGSIG_SIGN_CHANGES'} ||""))=~ s/^"(.+)"$/$1/ if ! $sign_changes; if (! $sign_changes) { $sign_changes = "auto"; } elsif (! grep {$sign_changes eq $_} qw(no auto yes full force_full)) { if ($sign_changes =~ /(?:deb|changes)$/) { push @files, $sign_changes; $sign_changes = "yes"; } else { print "W: Unrecognized argument to --sign-changes, using \"auto\": $sign_changes\n"; $sign_changes = "auto"; } } if ($sign && $cache_pass && ! $pass_file) { eval { require Term::ReadKey; }; if ($@) { print STDERR "Couldn't load Term::ReadKey. Please install. Passphrase caching disabled.\n"; } else { my $passphrase_valid = 0; while (! $passphrase_valid) { print "The passphrase for ".($key || "your default key").": "; Term::ReadKey::ReadMode("noecho"); chomp($passphrase = Term::ReadKey::ReadLine(0)); Term::ReadKey::ReadMode("restore"); print "\n"; #Try to use the key: open (TMP, ">", $tempdir . '/tmp-file') or die "Can't open $tempdir/tmp-file: $!"; print TMP "Foobar"; close TMP; my $error; eval { my @sign_cmd = ("gpg", "--clearsign"); push @sign_cmd, ("--default-key", $key) if $key; push @sign_cmd, ("--no-tty", "--batch", "--passphrase-fd", "0"); push @sign_cmd, $gpgoptions if $gpgoptions; push @sign_cmd, "$tempdir/tmp-file"; my ($write_handle, $read_handle); open3 ($write_handle, $read_handle, undef, @sign_cmd) || die _die("Signing failed: $!"); print $write_handle $passphrase, "\n"; close $write_handle; $error = join "", <$read_handle>; close $read_handle; }; if ($error && $error =~ /^gpg:.+bad passphrase\n/) { $passphrase_valid = 0; print STDERR "Wrong passphrase for " . (($key)?"key $key":"default key") . 
"!\n"; } else { $passphrase_valid = 1; } } } } } sub help { print < Sign files -c,--verify Verify signatures on files -l,--list List signatures on files --get-hashes Get hashes file for files --sign-hashes Sign hashes file --write-hashes Write sigs from signed hashes file Options: -m,-e Specify maintainer name to use when signing -k Specify keyid to use when signing -v,--verbose Makes dpkg-sig more verbose --also-v2-sig Verify sigs from dpkg-sig 0.2 and earlier --also-v3-sig Verify sigs from dpkg-sig 0.3-0.10 -a,--sign-changes Tells whether also sign the changes and dsc-files. The default is auto. -g,--gpgoptions DANGEROUS: Specify custom gpg options. -p,--cache-passphrase INSECURE: Caches gpg passphrase in dpkg-sig -f,--passphrase-file INSECURE: Let gpg use passphrase from EOH } sub _die { chomp(my $msg = shift || "No error msg given! This is a bug, hurt the author!"); my $i = 0; while ($_ = (caller($i++))[3]) { if ($_ && $_ eq "(eval)") { return $msg; } } my $code = shift || 1; my $line = (caller)[2]; print STDERR "E: $msg\n"; exit $code; } sub _read_file { my $file = shift; my $content; open (FH, $file) or die _die "Can't open $file: $!"; $content = join "", ; close FH; return $content; } sub _write_file { my $file = shift; my @content = @_; open (FH, ">", $file) or die _die "Can't open $file for writing: $!"; print FH @content; close FH; } =pod =head1 AUTHOR B and this manpage were written by Andreas Barth und Marc Brockschmidt. They are Copyright (C) 2003, 2004 by them and released under the GNU General Public Licence version 2 or later; there is NO WARRANTY. See F and F for details. =cut # vim:set shiftwidth=4: # vim:set tabstop=4: # vim:set noet: # vim:set shiftround: debdelta/contrib/findurl.py0000644000000000000000000000650312436652141013175 0ustar #!/usr/bin/python # Copyright (C) 2009 Andrea Mennucci. 
# License: GNU Library General Public License, version 2 or later import os, sys, string, ConfigParser from string import join def version_mangle(v): if ':' in v : return join(v.split(':'),'%3a') else: return v def delta_uri_from_config(config, **dictio): secs=config.sections() for s in secs: opt=config.options(s) if 'delta_uri' not in opt: print 'Error!! sources.conf section ',s,'does not contain delta_uri' raise SystemExit(1) match=True for a in dictio: #damn it, ConfigParser changes everything to lowercase ! if ( a.lower() in opt ) and ( dictio[a] != config.get( s, a) ) : #print '!!',a, repr(dictio[a]) , ' != ',repr(config.get( s, a)) match=False break if match: return config.get( s, 'delta_uri' ) ############main code if len( sys.argv) <=1 : print "Usage: findurl.py [packages]" else: config=ConfigParser.SafeConfigParser() config.read(['/etc/debdelta/sources.conf', os.path.expanduser('~/.debdelta/sources.conf') ]) try: import apt_pkg except ImportError: print 'ERROR!!! python module "apt_pkg" is missing. Please install python-apt' raise SystemExit try: import apt except ImportError: print 'ERROR!!! python module "apt" is missing. 
Please install a newer version of python-apt (newer than 0.6.12)' raise SystemExit apt_pkg.init() cache=apt.Cache() #cache.upgrade(True) for a in sys.argv[1:]: p = cache[a] installed_version=p.installed.version candidate = p.candidate candidate_version=p.candidate.version print 'Looking up ',a, ' version ',candidate_version for origin in p.candidate.origins: arch=candidate.architecture if not candidate.uris : print 'Sorry, cannot find an URI to download the debian package of ',a continue deb_uri = candidate.uri deb_path=string.split(deb_uri,'/') deb_path=string.join(deb_path[(deb_path.index('pool')):],'/') print " One of the archives for this package has this info " print " Origin ",origin.origin print " Archive ",origin.archive print " Label ",origin.label print " Site ",origin.site #print " Component ",origin.component is not used below #print " Code Name ", is not available in Python APT interface AFAICS delta_uri_base=delta_uri_from_config(config, Origin=origin.origin, Label=origin.label, Site=origin.site, Archive=origin.archive, PackageName=p.name) if delta_uri_base == None: print ' Sorry, sources.conf does not provide a server for this archive' continue if installed_version == candidate_version: print ' Sorry, this package is already at its newest version for this archive' continue #delta name delta_name=p.name+'_'+version_mangle(installed_version)+\ '_'+ version_mangle(candidate_version)+'_'+\ arch+'.debdelta' uri=delta_uri_base+'/'+os.path.dirname(deb_path)+'/'+delta_name print ' The package ',a,' may be upgraded by using: ', uri debdelta/contrib/histograms.sh0000755000000000000000000000755312436652141013705 0ustar #!/bin/sh unset LANG unset LC_ALL unset LC_NUMERIC if test "$2" = "" -o ! 
-r "$2" ; then echo Usage $0 ' dir_or_prefix logs [ logs ] ' exit fi PREFIX="$1" shift t=`tempfile` #echo ===== working towards $PREFIX dir=`dirname $0` zcat "$@" | awk -f $dir/sizes_histogram.awk > ${PREFIX}sizes.txt if test -s ${PREFIX}sizes.txt ; then cat > $t < 1 ) { SZ = $16 * 1; TOTDEBSZ = TOTDEBSZ + SZ ; PC = $4 * 1 ; TOTDELTASZ = TOTDELTASZ + ( SZ * PC ) ; print SI, " ", PC,SZ }}; /^ delta is/{if (SI > 1 ) { SZ = $15 * 1; TOTDEBSZ = TOTDEBSZ + SZ ; PC = $3 * 1 ; TOTDELTASZ = TOTDELTASZ + ( SZ * PC ) ; print SI, " ", PC,SZ }}; END{printf("# total %d new debs processed, %d KB installed size\n",N,TOTDEBSZ); if(TOTDEBSZ>0){printf("#size average=%d\n", TOTDELTASZ / TOTDEBSZ );};}' > $sizes_by_instsize sizes_by_instsize_avg=`tail -1 $sizes_by_instsize | cut -d= -f2 ` if grep -q average $sizes_by_instsize ; then cat > $t < 1) { N = N+1 ; TOTSZ = TOTSZ + SZ ; TOTTIM = TOTTIM + $3 ; SP= $6 * 1 ; TSP=TSP+SP ; print SI, " ", SP }}; #/^Total running time:/{TOTTIM = $4 * 1 } END{if(TOTTIM>0){printf("#create average=%d\n", TOTSZ / TOTTIM )};}' > $delta_speeds_by_instsize delta_speeds_by_instsize_avg=`tail -1 $delta_speeds_by_instsize | cut -d= -f2` patch_speeds_by_instsize=`tempfile` zcat "$@" | awk '/NEW/{P=$3 ; SI=$9 * 1}; /^ deb delta is/{SZ = $16 * 1 ;} /^ delta is/{SZ = $15 * 1 ;} / Patching done/{ if( SI > 1) {N = N+1 ; TOTSZ = TOTSZ + SZ ; TOTTIM = TOTTIM + ($4 * 1 ); SP= $6 * 1 ; TSP=TSP+SP ; print SI, " ", SP }}; END{if(TOTTIM>0){printf("#patch average=%d\n", TOTSZ / TOTTIM )} ; }' > $patch_speeds_by_instsize patch_speeds_by_instsize_avg=`tail -1 $patch_speeds_by_instsize | cut -d= -f2` if grep -q average $delta_speeds_by_instsize && grep -q average $patch_speeds_by_instsize ; then cat > $t < "${PREFIX}.txt" rm $t $sizes_by_instsize $delta_speeds_by_instsize $patch_speeds_by_instsize debdelta/contrib/parse_dist0000755000000000000000000000125312436652141013240 0ustar #!/usr/bin/python import sys, shelve if len(sys.argv) <= 1: print 'Usage: ',sys.argv[0],' 
python-database [ Debian "Package" from dists]' sys.exit(0) def de_n(a): if a and a[-1] == '\n' : a = a[:-1] return a def parse_dist(f,d): a=f.readline() p={} while a: if a[:4] in ('Pack','Vers','Arch','Stat','Inst','File'): a=de_n(a) i=a.index(':') assert(a[i:i+2] == ': ') p[a[:i]] = a[i+2:] elif a == '\n': d[p['Package']] = p p={} a=f.readline() d = shelve.open(sys.argv[1]) for n in sys.argv[2:]: f=open(n) parse_dist(f,d) f.close() d.close() debdelta/contrib/speeds_histogram.awk0000644000000000000000000000040312436652141015215 0ustar /^OLD/{P=$3 ; OV= $5; }; /^NEW/{P=$3 ; NV= $5; Z=$0}; /delta time.*bsdiff/{ bt= $10 * 1 ; dt = $3 * 1 ; if ( dt > 1 ) { C = C + bt / dt ; N= N + 1 ; if( bt < 0.4 * dt ) { print 100 * $10 / $3 , P, OV, NV, "'" $0 "'" } }} END{print "average", 100*C/N } debdelta/contrib/debmirror_2.4_--trash_option.patch0000644000000000000000000000637612436652141017504 0ustar --- debmirror-2.4/debmirror 2009-12-19 22:21:28.000000000 +0100 +++ debmirror 2010-01-19 11:29:40.000000000 +0100 @@ -62,6 +62,7 @@ [--di-dist=foo[,bar,..] ...] [--di-arch=foo[,bar,..] ...] [--source|--nosource] [--i18n] [--getcontents] [--md5sums] [--ignore-missing-release] [--ignore-release-gpg] + [--trash=dir] [--ignore=regex] [--exclude=regex] [--include=regex] [--exclude-deb-section=regex] [--limit-priority=regex] [--timeout=seconds] [--max-batch=number] @@ -272,6 +273,10 @@ Don't fail if the F file is missing. If the file does exist, it is mirrored and verified, but any errors are ignored. +=item --trash=dir + +When cleaning up, instead of deleting, move files in this directory. + =item --ignore=regex Never delete any files whose filenames match the regex. May be used multiple times. @@ -520,6 +525,8 @@ use Digest::MD5; use Digest::SHA1; use LWP::UserAgent; +use File::Basename; +use File::Copy; # Yeah, I use too many global variables in this program. 
our $mirrordir; @@ -534,6 +541,7 @@ our $cleanup=0; our $post_cleanup=1; our $no_cleanup=0; +our $trash_dir=0; our $do_source=1; our $host="ftp.debian.org"; our $user="anonymous"; @@ -630,6 +638,7 @@ 'rsync-extra=s' => \@rsync_extra, 'cleanup' => \$cleanup, 'postcleanup' => \$post_cleanup, + 'trash=s' => \$trash_dir, 'nocleanup' => \$no_cleanup, 'ignore=s' => \@ignores, 'exclude=s' => \@excludes, @@ -1402,6 +1411,24 @@ exit; +sub delete_or_trash { + my $file = shift; + if ( ! $trash_dir ) { + say("deleting $file") if ($verbose); + if (! $dry_run) { + unlink $file or die "unlink $file: $!"; + } + } else { + say("trashing $file") if ($verbose); + if (! $dry_run) { + rename ($file, ($trash_dir . "/" . basename($file) )) or + { copy ($file, ($trash_dir . "/" . basename($file) )) and + unlink $file + } or die "cannot move or copy $file to trash: $!"; + } + } +} + sub print_dl_size { my $size=shift; my $unit; @@ -2255,10 +2282,7 @@ foreach my $file (sort keys %files) { next if (@di_dists && $file =~ m:installer-\w+/current/images/:); if ($files{$file} == 2 && -f $file) { - say("deleting $file") if ($verbose); - if (! $dry_run) { - unlink $file or die "unlink $file: $!"; - } + delete_or_trash($file) } } } else { @@ -2273,10 +2297,7 @@ next if (@di_dists && $file =~ m:installer-\w+/current/images/:); unless ((exists $files{$file} && $files{$file} != 2) or (defined($ignore) && $file=~/$ignore/o)) { - say("deleting $file") if ($verbose); - if (! $dry_run) { - unlink $file or die "unlink $file: $!"; - } + delete_or_trash($file) } } } @@ -2419,10 +2440,7 @@ $file=~s:^\./::; if (! exists $di_files{$image_dir} || ! exists $di_files{$image_dir}{$file}) { next if (exists $di_files{$image_dir} && $file eq "MD5SUMS"); - say("deleting $image_dir/$file") if ($verbose); - if (! 
$dry_run) { - unlink "$file" or die "unlink $image_dir/$file: $!\n"; - } + delete_or_trash($file); } } chdir("$mirrordir") or die "unable to chdir($tempdir): $!\n"; debdelta/contrib/debmarshal_list_useless_debs0000755000000000000000000000722112436652141017001 0ustar #!/usr/bin/perl -w # # # Scan pool and dist directories and snapshots. # List any deb that is not in a dist. # # Copyright 2010 Google Inc. , 2011 A. Mennucci # # downloaded from # http://debmarshal.googlecode.com/svn/trunk/repository2/pooldebclean/pooldebclean.pl # and then slighlty patched # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # Author: Drake Diedrich , A. Mennucci use Getopt::Long; use Pod::Usage; use DirHandle; use FileHandle; use File::Path; use File::Find; use strict; # # Return a list of files of all the Packages files # sub packages_files($); sub packages_files($) { my ($dir) = @_; my (@packages); find(sub {/^Packages$/ && -f $_ && push(@packages,$File::Find::name); }, $dir); @packages; } # # Parse an open filehandle that is a Packages file for the complete # list of .debs that are indexed in a repository. # sub parse_packages($$) { my ($fh,$packages) = @_; while (my $line = $fh->getline) { if ($line =~ /^Filename:\s*(\S+)\s*$/) { $packages->{$1}++; } } } sub purge_pool($$$$); sub purge_pool($$$$) { my ($dir,$path,$packages,$unlink) = @_; my $dh = new DirHandle $dir; while (my $de = $dh->read) { my $fullpath = "$dir/$de"; next if $de eq '.' 
|| $de eq '..'; if (-d $fullpath) { purge_pool("$dir/$de","$path/$de",$packages,$unlink); } elsif (-f $fullpath) { if ($de =~ /\.deb$/) { if (!defined $packages->{"$path/$de"}) { &{$unlink}($fullpath); } } } } } sub pooldebclean($) { my ($repository) = @_; my (%packages); if (! -d $repository) { return ["$repository/ does not exist", 2]; } if (! -d "$repository/dists") { return ["$repository/dists/ does not exist", 2]; } if (! -d "$repository/pool") { return ["$repository/pool/ does not exist",2]; } my (@packages) = packages_files("$repository/dists"); foreach my $package (@packages) { my $packagefh = new FileHandle $package; parse_packages($packagefh,\%packages); } purge_pool("$repository/pool", "pool", \%packages, sub {print @_ ; print "\n" ;} ); [undef, 0]; } # main() # # Parse options, print usage, and return with exit codes. # sub main { my %options; my $result = GetOptions(\%options, 'help|?', 'man') or pod2usage(2); pod2usage(1) if $options{'help'}; pod2usage(-verbose => 2) if $options{'man'}; if (@ARGV != 1) { pod2usage("$0: Repository directory required.\n"); } my ($inputdir) = @ARGV; my ($rcmsg,$rc) = @{pooldebclean($inputdir)}; print STDERR $rcmsg if defined $rcmsg; $rc; } if (!caller()) { main(); } else { return 1; } __END__ =head1 NAME debmarshal_list_useless_debs - list unused .deb pool files from a repository =head1 SYNOPSIS debmarshal_list_useless_debs {repository directory} Options: -help brief help message -man full documentation =head1 OPTIONS =over 8 =item B<-help> Print a brief help message and exits. =item B<-man> Prints the manual page and exits. =back =head1 DESCRIPTION B will list all the unused .debs in a repository pool, including debmarshal snapshot and regular Debian repositories. =cut debdelta/contrib/debmirror-deltas~deprecated0000755000000000000000000002765212436652141016574 0ustar #!/bin/bash -e # A complex script that creates a repository of deltas, that can be # used by debdelta-upgrade for upgrading packages. 
# See also --help below. # WARNING : after the release of Squeeze, this script is deprecated: # indeed the new archive signatures need debmirror >= 2.6, # and I did not prepare a 'trash' patch for it, but instead wrote # a new script to use the new 'debmirror --marshal' option # Copyright (C) 2006-11 Andrea Mennucci. # License: GNU Library General Public License, version 2 or later ########### customize under here vvvvvvvvvvvvv #where is the full mirror of debian debmir=/mirror/debian #where to mirror from host=ftp.debian.org #where to store deltas deltamir=/mirror/debian-deltas #delete old debs from trash if the space here drops below kB : minfreespace=2000000 #delete deltas and old_debs that are older than days (regardless of space) old_delete_days=50 #where to keep snapshots of the older indexes of the archives (for the --snapshot option) dists_snapshots=/mirror/debian-snapshots #where the gnupg stuff specific to debdelta resides # and in particular the public and private keyrings GNUPGHOME=/var/lib/debdelta/gnupg #where the gnupg agent variable is stored #(unset this if you do not want to use the agent) # note that this needs gnupg2 >= 2.0.11 (that is not lenny) GNUPGAGENTINFO="${GNUPGHOME}/debdeltas-gpg-agent-info" #the secret key to sign the deltas GNUPGSEC=0xTHESECKEY #note: if you export this , then it will affect also debmirror: # make sure the deb repository key is copied there #export GNUPGHOME #where is the debdeltas program debdeltas=/usr/bin/debdeltas #options to your taste debdelta_opt="-v -d --test --delta-algo xdelta3 --gpg-home $GNUPGHOME --sign $GNUPGSEC " ##if you apply the 'trash patch' in contrib to debmirror, then you may use this: debmirror_opt="--trash $deltamir/old_debs --method=http --diff=none" trash=$deltamir/old_debs debmirror=/usr/local/bin/debmirror_2.4_mine ##otherwise this should work as well #debmirror_opt='--method=http' #trash='' #debmirror=/usr/bin/debmirror #note: files in $trash will be deleted by this script! 
use a dedicated dir! #this should be in the same partition as the debian mirror $debmir # (must be, if the trash is not used) TMPDIR=/mirror/tmp export TMPDIR ARCHc='i386,amd64' ARCHs='i386 amd64' DISTc='lenny,squeeze,sid,experimental' DISTs='lenny squeeze sid experimental' ########### customize over here ^^^^^^^^^^^^^^ #since the output is automatically parsed, do it in English unset LANG unset LC_ALL unset LC_NUMERIC startgpgagent () { gpg-agent --homedir "${GNUPGHOME}" --daemon --write-env-file "$GNUPGAGENTINFO" ; } ##enable this after catastrofic failures, to try to create all leftover deltas ## parsing all olddists once again RECOVER='' DEBUG='' VERBOSE='' RM='rm' DO_MIRROR=true DO_CLEAN_DELTAS='' DO_SNAPSHOT='' while [ "$1" ] ; do case "$1" in -h|--help) cat < $t gpg2 --quiet --homedir "${GNUPGHOME}" -o /dev/null --sign $t rm $t exit 0 ;; *) echo "Unknown option $1 , try --help" ; exit 1 ;; esac shift done #################### set gpg stuff, test it gpgagentcmd="$0 --start-gpg-agent" # set gpg-agent variables, test it if test "$GNUPGAGENTINFO" ; then if test ! -r "$GNUPGAGENTINFO" ; then echo ERROR no agent info, please start the agent with echo $gpgagentcmd exit 1 else . "$GNUPGAGENTINFO" export GPG_AGENT_INFO if test ! "${GPG_AGENT_INFO}" -o ! -e "${GPG_AGENT_INFO/:*/}" -o ! -O "${GPG_AGENT_INFO/:*/}" ; then echo ERROR agent info is not OK, please run the command echo $gpgagentcmd exit 1 elif ! echo | gpg-connect-agent --homedir ${GNUPGHOME} ; then echo ERROR agent is not responding, please run the command echo $gpgagentcmd exit 1 fi fi fi #test that we can sign, possibly loading the password in the agent if test "$GNUPGSEC" ; then t=`tempfile` echo pippo > $t if ! 
gpg2 --quiet --batch --homedir "${GNUPGHOME}" -o /dev/null --default-key $GNUPGSEC --sign $t ; then echo signature test FAILED rm $t exit 1 fi rm $t fi ###################### some useful variables #the lock used by debmirror debmirlock=$debmir/Archive-Update-in-Progress-`hostname -f` b=`basename $0` lockfile -r 10 /tmp/$b.lock || exit 1 trap "rm $VERBOSE -f /tmp/$b.lock" 0 olddists="" log=/nonexistant ## profile debdeltas #debdeltas="python -m cProfile -o /tmp/debdelta-profile-$(dirname $p | tr / _ ) $debdeltas" today=`date +'%F'` yyyymm=`date +'%Y-%m'` ################## routines freetrash () { test "$trash" || return age=${old_delete_days} blocksize=`stat -f -c "%s" "$trash"` newsize=0 #trick to run at least once while test $age -ge 1 -a $newsize -le $minfreespace ; do find "$trash" -name '*.deb' -type f -mtime +$age | xargs -r $RM $VERBOSE || true freeblocks=`stat -f -c "%a" "$trash"` newsize=`expr $freeblocks \* $blocksize / 1024` age=`expr $age - 5` done } cleanuppool () { find $deltamir/pool \ \( -name '*debdelta-fails' -or -name '*debdelta-too-big' \ -or -name '*debdelta' \) -mtime +${old_delete_days} -type f |\ xargs -r $RM $VERBOSE || true find $deltamir/pool/ -type d -empty | xargs -r rmdir $VERBOSE || true if test "$trash" ; then find $trash -type f -not -name '*.deb' | xargs -r $RM $VERBOSE || true fi } run_debmirror () { if test -e $debmirlock ; then echo Archive $debmir is locked exit 1 fi olddists=${TMPDIR:-/tmp}/olddists-`date +'%F_%H-%M-%S'` mkdir $olddists tm=$olddists/debmirror-stdout tme=$olddists/debmirror-stderr cp -a $debmir/dists $olddists ## if using trash, this is not needed : debdeltas will ##go fishing in the trash dir to find all deleted deltas if ! test "trash" ; then cp -al $debmir/pool $olddists fi $debmirror $debmir --nosource \ $debmirror_opt --arch=$ARCHc \ --section=main,contrib,non-free \ -v $DEBUG -h $host -d $DISTc \ > $tm 2> $tme ; dme=$? 
if test "$dme" != 0 ; then echo debmirror failed , exit code $dme , stdout $tm awk '{ print "> " $0 }' $tm echo debmirror failed stderr $tme awk '{ print "> " $0 }' $tme $RM -f $debmirlock || true #sometimes the temporary directory of debmirror is messed up #rm -r $debmir/.temp cleanuppool freetrash exit 1 else $RM $VERBOSE $tm $tme fi } findmelog () { date=`date +'%F-%H'` log=$deltamir/log/$yyyymm/${date}.log err=$deltamir/log/$yyyymm/${date}.err if test -r $log -o -r $log.gz -o -r $err -o -r $err.gz ; then for i in 0 1 2 3 4 5 6 7 8 9 a b c d e f g h i j k l m n o p q r s t u v w x y z ; do if test -r $log -o -r $log.gz -o -r $err -o -r $err.gz ; then log=$deltamir/log/$yyyymm/${date}.$i.log err=$deltamir/log/$yyyymm/${date}.$i.err fi done fi } created_deltas="" run_debdeltas () { if ! test "$olddists" -a -r "$olddists/dists" ; then echo "run_debdelta : bad $olddists" return fi findmelog ( exec >> $log exec 2>> $err echo -n --------------oldnew pass---- ; date ; echo "------ tmpdir was in $olddists" echo -n ---- options to debdelta ; echo -- $debdelta_opt for dist in $DISTs ; do echo -------------- $dist ---- #lenny contains debdelta 0.27, that does not understand lzma if test "$dist" = lenny ; then x="--disable-feature lzma" ; else x="" ; fi for p in $( cd $debmir ; find dists/$dist/ -name Packages.gz) ; do if test -r ${dists_snapshots}/before.4/$p ; then o="--old ${dists_snapshots}/before.4/$p" ; else o='' ; fi echo ------ debdelta $x $o --old $olddists/$p $p $debdeltas $VERBOSE $debdelta_opt \ --alt $trash $x $o \ --old $olddists/$p \ --dir $deltamir// $debmir/$p done done ) #uncomment for added verbosity #egrep -3 -i 'error|warning' $log || true if test -s $err || grep -iq 'error' $log ; then #some error occurred test ! 
-r "$log".gz && { gzip -9 $log ; log=${log}.gz ; } echo "-----ERRORS--- full log is in $log or ...err.gz , data in $olddists " else rm $err # it is empty #no error occurred, clean up if test "$olddists" ; then if test -r "$olddists/dists" ; then $RM -r "$olddists/dists" fi if test -r "$olddists/pool" ; then $RM -r "$olddists/pool" fi #this is a bit exotic , but unfortunately 'test' does not test empty dirs if find "$olddists" -maxdepth 0 -type d -empty | grep -q "$olddists" ; then rmdir "$olddists" ; else echo Not empty "$olddists" ; fi fi if grep -v '^---' $log | grep -v '^ Total running time' | grep -q '.' ; then test ! -r "$log".gz && gzip -9 $log created_deltas=1 else $RM $log fi fi } run_debdeltas_clean () { echo -------------- cleanup delta pool $debdeltas --clean-deltas -n 0 \ --dir $deltamir// \ $( for arch in $ARCHs ; do for sec in main contrib non-free ; do for dist in $DISTs ; do echo $debmir/dists/$dist/$sec/binary-$arch/Packages.gz done ; done ; done ) } ########################################### code #always free space from the trash freetrash if test `stat -f -c "%a" "$deltamir"` -le 32 ; then echo "------- emergency pool cleanup , very low disk space in delta mirror" run_debdeltas_clean cleanuppool elif test "${DO_CLEAN_DELTAS}" ; then run_debdeltas_clean cleanuppool fi test -d $deltamir/log/$yyyymm || mkdir $deltamir/log/$yyyymm if test "${DO_SNAPSHOT}" ; then if test -e $debmirlock ; then echo Archive $debmir is locked, cannot --snapshot elif test -w ${dists_snapshots} -a -d ${dists_snapshots} ; then test -e ${dists_snapshots}/before.4 && rm -r ${dists_snapshots}/before.4 test -e ${dists_snapshots}/before.3 && mv ${dists_snapshots}/before.3 ${dists_snapshots}/before.4 test -e ${dists_snapshots}/before.2 && mv ${dists_snapshots}/before.2 ${dists_snapshots}/before.3 test -e ${dists_snapshots}/before.1 && mv ${dists_snapshots}/before.1 ${dists_snapshots}/before.2 mkdir ${dists_snapshots}/before.1 cp -a $debmir/dists ${dists_snapshots}/before.1/. 
else echo Please set dists_snapshots to a directory where I can write, not ${dists_snapshots} 1>&2 fi fi if [ "${DO_MIRROR}" ] ; then run_debmirror else echo ---------- skipped mirror step fi if [ "$olddists" -a -r "$olddists" ] ; then #the mirror was updated, so we check the difference run_debdeltas else test "$DEBUG" && echo olddists is not defined or is not readable : "$olddists" fi ##enable this to try to create all leftover deltas if test "$RECOVER" ; then for olddists in ${TMPDIR:-/tmp}/olddists* ; do if [ "$olddists" -a -r "$olddists" ] ; then echo ----------------------------------- creating deltas wrt $olddists run_debdeltas fi done fi debdelta/contrib/debmirror-marshal-deltas0000755000000000000000000002747712436652141016007 0ustar #!/bin/bash set -e # A complex script that creates a repository of deltas, that can be # used by debdelta-upgrade for upgrading packages. # See also --help below. # Copyright (C) 2006-11 Andrea Mennucci. # License: GNU Library General Public License, version 2 or later #since the output is automatically parsed, do it in English unset LANG unset LC_ALL unset LC_NUMERIC startgpgagent () { gpg-agent --homedir "${GNUPGHOME}" --daemon --write-env-file "$GNUPGAGENTINFO" ; } DEBUG='' VERBOSE='' RM='rm' MV='mv' DO_MIRROR=true DO_CLEAN_DELTAS='' DO_CLEAN_DEBS='' CONF='' STARTAGENT='' while [ "$1" ] ; do case "$1" in -h|--help) cat < $t gpg2 --quiet --homedir "${GNUPGHOME}" -o /dev/null --sign $t rm $t exit 0 fi #################### set gpg stuff, test it gpgagentcmd="$0 -c $CONF --start-gpg-agent" # set gpg-agent variables, test it if test "$GNUPGAGENTINFO" ; then if test ! -r "$GNUPGAGENTINFO" ; then echo ERROR no agent info, please start the agent with echo $gpgagentcmd exit 1 else . "$GNUPGAGENTINFO" export GPG_AGENT_INFO if test ! "${GPG_AGENT_INFO}" -o ! -e "${GPG_AGENT_INFO/:*/}" -o ! -O "${GPG_AGENT_INFO/:*/}" ; then echo ERROR agent info is not OK, please run the command echo $gpgagentcmd exit 1 elif ! 
echo | gpg-connect-agent --homedir ${GNUPGHOME} ; then echo ERROR agent is not responding, please run the command echo $gpgagentcmd exit 1 fi fi fi #test that we can sign, possibly loading the password in the agent if test "$GNUPGSEC" ; then t=`tempfile` echo pippo > $t if ! gpg2 --quiet --batch --homedir "${GNUPGHOME}" -o /dev/null --default-key $GNUPGSEC --sign $t ; then echo signature test FAILED rm $t exit 1 fi rm $t fi ###################### some useful variables #the lock used by debmirror debmirlock=$debmir/Archive-Update-in-Progress-`hostname -f` b=`basename $0` mylockfile=${TMPDIR:-/tmp}/$b.lock lockfile -r 10 $mylockfile || exit 1 trap "rm $VERBOSE -f $mylockfile" 0 log=/nonexistant ## profile debdeltas #debdeltas="python -m cProfile -o /tmp/debdelta-profile-$(dirname $p | tr / _ ) $debdeltas" today=`date +'%F'` yyyymm=`date +'%Y-%m'` ################## routines clean_old_deltas () { find $deltamir/pool \ \( -name '*debdelta-fails' -or -name '*debdelta-too-big' \ -or -name '*debdelta' \) -mtime +${old_delete_days} -type f |\ xargs -r $RM $VERBOSE || true find $deltamir/pool/ -type d -empty | xargs -r rmdir $VERBOSE || true } run_debmirror () { if test -e $debmirlock ; then echo Archive $debmir is locked exit 1 fi echo -n "---start mirroring at " >> $log ; date --utc >> $log tm=`tempfile` tme=`tempfile` $debmirror $debmir --nosource \ $debmirror_opt --arch=$ARCHc \ --section=$SECTIONc \ -v $DEBUG -h $host -d $DISTc \ > $tm 2> $tme ; dme=$? 
if test "$dme" != 0 ; then echo debmirror failed , exit code $dme , stdout $tm awk '{ print "> " $0 }' $tm echo debmirror failed stderr $tme awk '{ print "> " $0 }' $tme $RM -f $debmirlock || true #sometimes the temporary directory of debmirror is messed up #rm -r $debmir/.temp clean_old_deltas rm $log exit 1 else $RM $VERBOSE $tm $tme fi echo -n "---end mirroring at " >> $log ; date --utc >> $log } findmelog () { date=`date +'%F-%H'` log=$deltamir/log/$yyyymm/${date}.log if test -r $log -o -r $log.gz ; then for i in 0 1 2 3 4 5 6 7 8 9 a b c d e f g h i j k l m n o p q r s t u v w x y z ; do if test -r $log -o -r $log.gz ; then log=$deltamir/log/$yyyymm/${date}.$i.log fi done fi } created_deltas=0 run_debdeltas () { debdeltas_errors=0 echo -n "--------- running debdeltas ------" >> $log ; date --utc >> $log ; echo "---- options ---- $debdelta_opt " >> $log for dist in $DISTs ; do dist_created_deltas=0 distlog=`tempfile` #find latest pushd $debmir/dists/$dist/ > /dev/null latest=$(echo [0-9] [0-9][0-9] [0-9][0-9][0-9] [0-9][0-9][0-9][0-9] \ | tr ' ' '\n' | sort -n | tail -n 1) let prev=latest || true while test "$prev" -gt 1 && cmp -s Release $prev/Release ; do let prev=prev-1 || true done popd > /dev/null echo -------------- $dist ---- >> $distlog echo -n "---Release " >> $distlog ; grep ^Date $debmir/dists/$dist/Release >> $distlog echo -n "---Start at " >> $distlog ; date --utc >> $distlog #lenny contains debdelta 0.27, that does not understand lzma if test "$dist" = lenny ; then x="--disable-feature lzma,xz" ; #squeeze contains debdelta 0.39trl, that does not understand xz elif test "$dist" = squeeze ; then x="--disable-feature xz" ; else x="" ; fi for sec in $SECTIONs ; do for arch in $ARCHs ; do sec_arch_dist_log=`tempfile` sec_arch_dist_err=`tempfile` wo="--old $debmir/dists/$dist/$prev/$sec/binary-$arch/Packages.gz" if test "$dist" = squeeze-updates ; then wo="$wo --old $debmir/dists/squeeze/$sec/binary-$arch/Packages.gz" fi OV=0 ; while test $OV -lt $prev 
; do if test -r $debmir/dists/$dist/$OV/$sec/binary-$arch/Packages.gz ; then wo="$wo --old $debmir/dists/$dist/$OV/$sec/binary-$arch/Packages.gz" fi let OV=OV+10 done echo "------ $sec $arch -- (debdelta options += $x $wo )" >> $sec_arch_dist_log if $debdeltas $VERBOSE $debdelta_opt $x $wo --dir $deltamir// \ $debmir/dists/$dist/$sec/binary-$arch/Packages.gz >> $sec_arch_dist_log 2>> $sec_arch_dist_err ; then #todo, only write if something happened cat $sec_arch_dist_log >> $distlog else debdeltas_errors=$? cat $sec_arch_dist_log >> $distlog echo "--- debdeltas failed above, ret = ${debdeltas_errors}" >> $distlog echo "--- debdeltas failed on $dist $sec $arch" fi if test -s "$sec_arch_dist_err" ; then debdeltas_errors=11111 #make sure we keep this log echo "--- debdeltas stderr " >> $distlog cat $sec_arch_dist_err >> $distlog echo "--- end of debdeltas stderr " >> $distlog echo "--- debdeltas produced stderr on $dist $sec $arch" fi rm $sec_arch_dist_log $sec_arch_dist_err done ; done if grep -v '^ Total running time' $distlog | grep -v '^---' | grep -q '.' 
; then let created_deltas=created_deltas+1 dist_created_deltas=1 fi if test "${debdeltas_errors}" -gt 0 -o "${dist_created_deltas}" -gt 0 ; then #only log distributions stanzas when some deltas were generated, or errors occourred cat $distlog >> $log if test "${publish_deltas}" ; then echo -n "--- publishing starts at " >> $log ; date --utc >> $log if ${publish_deltas} ; then echo -n "--- publishing ends at " >> $log ; date --utc >> $log else echo -n "--- publishing FAILS, at " >> $log ; date --utc >> $log echo "--- publishing FAILS" fi fi #else # echo SKIPPED THIS STANZA ; cat $distlog fi rm $distlog done gzip -9 $log if test "$debdeltas_errors" -gt 0 ; then #some error occurred echo "-----ERRORS--- full log is in $log.gz " else #no error occurred, clean up if test "$created_deltas" = 0 ; then $RM $log.gz fi fi } run_debdeltas_clean () { echo -------------- cleanup delta pool $debdeltas --clean-deltas -n 0 \ --dir $deltamir// \ $( for arch in $ARCHs ; do for sec in $SECTIONs ; do for dist in $DISTs ; do echo $debmir/dists/$dist/$sec/binary-$arch/Packages.gz done ; done ; done ) } debmarshal_trim_snapshots () { rectcode=1 for dist in $DISTs ; do pushd $debmir/dists/$dist/ > /dev/null #act only if there are at least 3 snapshots if test -d 0 -a -d 1 -a -d 3 ; then latest=$(echo [0-9] [0-9][0-9] [0-9][0-9][0-9] [0-9][0-9][0-9][0-9] \ | tr ' ' '\n' | sort -n | tail -n 1) #check sanity sane=1 ; t=0 ; f=1 ; while test "$f" -le "$latest" ; do if test ! 
-d "$f" ; then echo "Warning, ignoring $debmir/dists/$dist , the snapshot $f does not exist" sane=0 elif test $t -nt $f ; then echo "Warning, ignoring $debmir/dists/$dist , the snapshot $f is older than the $t " sane=0 fi let f++ ; let t++ ; done # if sane and too many snapshots, move all down a step if test "$sane" = 1 -a "$keep_marshal_snapshots" -a "$latest" -gt "$keep_marshal_snapshots" ; then retcode=0 let d=latest-keep_marshal_snapshots t=0 #remove older while test "$t" -le "$d" -a -d "$d" ; do $RM -r $t ; let t++ ; done #move down others t=0 let f=d+1 while test "$f" -le "$latest" ; do $MV $f $t let t++ let f++ done let t-- rm latest ln -s $t latest echo "------------ deleted revisions from 0 to $d in $dist (latest now points to $t)" fi # end of " if sane and too many snapshots" fi #end of "if at least 3 snapshots" popd > /dev/null done return $retcode } clean_old_debs () { if test -e $debmirlock ; then echo Archive $debmir is locked exit 1 fi echo -------------- cleanup mirror if debmarshal_trim_snapshots ; then z=`tempfile` ${debmarshal_list_useless_debs} $debmir > $z ! 
grep -vx '.*\.deb' $z #(just in case) n=$(wc -l < $z) if test $n -lt $max_delete ; then echo "--- delete $n debs" tr '\n' '\000' < $z | xargs -r0 $RM rm $z else echo "--- will not delete $n debs (too many!), the list is in $z" fi fi } ########################################### code if test `stat -f -c "%a" "$deltamir"` -le 512 ; then echo "------- emergency delta pool cleanup , very low disk space in delta mirror" run_debdeltas_clean clean_old_deltas elif test "${DO_CLEAN_DELTAS}" ; then run_debdeltas_clean clean_old_deltas fi if test `stat -f -c "%a" "$debmir"` -le 512 ; then echo "------- emergency debs pool cleanup , very low disk space in debs mirror" clean_old_debs elif test "${DO_CLEAN_DEBS}" ; then clean_old_debs fi test -d $deltamir/log/$yyyymm || mkdir $deltamir/log/$yyyymm findmelog if [ "${DO_MIRROR}" ] ; then run_debmirror else echo ---------- skipped mirror step fi #the mirror was updated, so we check the difference run_debdeltas debdelta/contrib/sizes_histogram.awk0000644000000000000000000000133412436652141015073 0ustar # this awk program scans the output of 'debdeltas -v' and prints statistics / deb delta is.*/{p = $4 * 1 ; s = $9 * 1 ; bucket = int(p / 5) ; count++; if (p <= 80 ) { cou ++; hist[ bucket ] ++ ; cumul += p ; savin += s };}; END{if(cou>0){ print "# ",int(cou * 100 /count) ,"% of deltas are <=80% of original;" print "# of those, the average percent is ",int(cumul / cou), "%" print "# of those, the average saving is ",int(savin / cou), "kB" print "# and the histogram is" val=0 while(val<=16) { #if (hist[val] > 0 ) { l=int(100*hist[val]/cou) printf("%2d %2d %s\n", val*5, l,substr("-------------------------------------------------------...",1,l) ) } val++ }} #for (val in hist) print val*5,int(100*hist[val]/cou) }; debdelta/contrib/debmirror-delta-security0000755000000000000000000000706512436652141016033 0ustar #!/bin/bash set -e # A simple script that will create a repository of deltas, that may be # used by debdelta-upgrade for upgrading 
package in stable and # stable-security # Copyright (C) 2006-11 Andrea Mennucci. # License: GNU Library General Public License, version 2 or later # With a change by Torsten Spindler for use with Ubuntu ############################ WARNING ############## #this script is not completely satisfactory # situation, # baz_2_all.deb is in stable, but then # baz_2+1_all.deb enters as a "point release" in stable and replaces baz_2_all.deb # (this needs a delta for stable updates, but it will be managed in a different repo) # then there is a security update, baz_2+2_all.deb # now two deltas should be generated for security , # 2 -> 2+2 # 2+1 -> 2+2 # the first one is not generated by this simple script. ########################################## #who I am b=`basename $0` DEBUG='' VERBOSE='' [ "$1" = '-v' ] && { VERBOSE='-v' ; shift ; } [ "$1" = '-d' ] && { DEBUG='--debug' ; shift; } [ "$1" = '-v' ] && { VERBOSE='-v' ; shift ; } if [ "$1" = '' ] || test ! -r "$1" ; then echo please provide the configuration file as argument exit 1 fi . "$1" if [ -n "$TMPDIR" ]; then export TMPDIR fi # set gpg-agent variables, test it gpgagentcmd="gpg-agent --homedir \"${GNUPGHOME}\" --daemon --write-env-file \"$GNUPGAGENTINFO\" " if test "$GNUPGAGENTINFO" ; then if test ! -r "$GNUPGAGENTINFO" ; then echo ERROR no agent info, please start the agent with echo $gpgagentcmd exit 1 else . "$GNUPGAGENTINFO" export GPG_AGENT_INFO if test ! "${GPG_AGENT_INFO}" -o ! -e "${GPG_AGENT_INFO/:*/}" -o ! -O "${GPG_AGENT_INFO/:*/}" ; then echo ERROR agent info is not OK, please run the command echo $gpgagentcmd exit 1 elif ! echo | gpg-connect-agent --homedir ${GNUPGHOME} ; then echo ERROR agent is not responding, please run the command echo $gpgagentcmd exit 1 fi fi fi #test that we can sign, possibly loading the password in the agent if test "$GNUPGSEC" ; then t=`tempfile` echo pippo > $t if ! 
gpg2 --quiet --batch --homedir "${GNUPGHOME}" -o /dev/null --default-key $GNUPGSEC --sign $t ; then echo signature test FAILED rm $t exit 1 fi rm $t fi #make copy of current stable-security lists of packages olddists=${TMPDIR:-/tmp}/oldsecdists-`date +'%F_%H-%M-%S'` mkdir $olddists cp -a $secdebmir/dists $olddists #do mirror security trap "rm $VERBOSE -f $secdebmirlock ; echo FAILED , please delete $olddists" 0 #this version of 'debmirror' is patched to support the '--trash' option, # see in /usr/share/debdelta $DEBMIRROR $DEBUG $VERBOSE $secdebmir $DEBMIRROR_TRASH \ --method=$DEBMIRROR_METHOD --nosource -h $sechost \ -r $release -d ${secstable} --arch=$ARCHc \ $DEBMIRROR_OPTIONS #do create deltas lockfile -r 1 /tmp/$b.lock || exit 1 trap "rm $VERBOSE -f /tmp/$b.lock; echo FAILED , please delete $olddists " 0 cd $secdebmir for arch in $ARCHs ; do for sec in $SECTIONS; do $debdeltas $VERBOSE -v --test $debdelta_opt \ --old $fulldebmir/dists/${origstable}/$sec/binary-$arch/Packages.gz \ --old $olddists/dists/${secstable}/$sec/binary-$arch/Packages.gz \ --alt $deltamir/old_debs \ --dir $deltamir// dists/${secstable}/$sec/binary-$arch/Packages.gz done done #do clean up a bit trap "" 0 rm $VERBOSE -f /tmp/$b.lock rm -r $olddists find $deltamir/old_debs -type f -mtime +80 | xargs -r rm || true find $deltamir/pool \ \( -name '*debdelta-fails' -or -name '*debdelta-too-big' \ -or -name '*debdelta' \) -mtime +80 -type f |\ xargs -r rm || true debdelta/contrib/debs_size_pdf.sh0000644000000000000000000000044412436652141014312 0ustar #compute distribution (=integral) of probability of packages sizes in an archive find pool -type f -name '*deb' | \ xargs ls -l |\ awk '{p=$5 * 1; bucket = int(p/1024) ; count++; hist[ bucket ] ++; }\ END{ for (val in hist) print val,hist[val] }'|\ sort -n |\ awk '{b+=$2; print $1, b}' debdelta/contrib/debmirror-marshal-deltas.conf0000644000000000000000000000417012436652141016711 0ustar # Example configuration for debmirror-marshal-deltas # 
Copyright (C) 2006-11 Andrea Mennucci. # License: GNU Library General Public License, version 2 or later ### what to mirror #where is stored the full mirror of debian debmir=/mirror/debian #where to mirror from host=ftp.debian.org ## debmirror options debmirror_opt="--debmarshal --method=http --diff=none" debmirror=/usr/bin/debmirror ARCHc='i386,amd64' ARCHs='i386 amd64' DISTc='lenny,squeeze,wheezy,sid,experimental' DISTs='lenny squeeze wheezy sid experimental' SECTIONc='main,contrib,non-free' SECTIONs='main contrib non-free' #how many snapshots to keep in the debs mirror # (as stored by the --debmarshal option) keep_marshal_snapshots=16 #when cleaning the debs mirror, do not delete anything # if the maximum number of files to be deleted exceeds this: max_delete=1500 #the tool that prints the list of 'useless debs' i.e. debs not # listed in any distribution debmarshal_list_useless_debs=/usr/share/debdelta/debmarshal_list_useless_debs ### how to delta #where to store deltas deltamir=/mirror/debian-deltas #delete deltas and old_debs that are older than days (regardless of space) old_delete_days=50 #where the gnupg stuff specific to debdelta resides # and in particular the public and private keyrings GNUPGHOME=/var/lib/debdelta/gnupg #where the gnupg agent variable is stored #(unset this if you do not want to use the agent) # note that this needs gnupg2 >= 2.0.11 (that is not lenny) GNUPGAGENTINFO="${GNUPGHOME}/debdeltas-gpg-agent-info" #the secret key to sign the deltas GNUPGSEC=0xTHATKEY #note: if you export this , then it will affect also debmirror: # make sure the deb repository key is copied there #export GNUPGHOME #where is the debdeltas program debdeltas=/usr/bin/debdeltas #options to your taste debdelta_opt="-v -d --test --delta-algo xdelta3 --gpg-home $GNUPGHOME --sign $GNUPGSEC " #command to copy created deltas in the public repository #e.g. #publish_deltas="rsync ...." 
publish_deltas="" #this should be in the same partition as the debian mirror $debmir # (otherwise the resulting deltas will have to copied from it) TMPDIR=/mirror/tmp export TMPDIR debdelta/contrib/debmirror-delta-security_debian.conf0000644000000000000000000000315212436652141020247 0ustar #where the security archive is sechost=security.debian.org release=debian-security #list of architecture (space separated, comma separated) ARCHs="i386 amd64" ARCHc="i386,amd64" # the archive sections to use SECTIONS="main contrib non-free" #where to download the full mirror of debian stable security secdebmir=/mirror/debian-security #the lock used by debmirror secdebmirlock=$secdebmir/Archive-Update-in-Progress-`hostname -f` #where is the full mirror of debian stable fulldebmir=/mirror/debian #the name by which "stable" is known in that mirror origstable="squeeze" #the corresponding path snippet for the security archive secstable="squeeze/updates" ##for ubuntu it may be #origstable="lucid" #secstable="lucid-security" #where the gnupg stuff specific to debdelta resides GNUPGHOME=/var/lib/debdelta/gnupg #where the gnupg agent variable is stored #(unset this if you do not want to use the agent) # note that this needs gnupg2 >= 2.0.11 (that is not lenny) GNUPGAGENTINFO="${GNUPGHOME}/debdeltas-gpg-agent-info" #the secret key to sign the deltas GNUPGSEC=THESECRETKEY #note: if you export this , then it will affect also debmirror # make sure the deb repository key is copied there #export GNUPGHOME #where to create the repository of deltas deltamir=/mirror/debian-security-deltas #where is the debdeltas program debdeltas=/usr/bin/debdeltas #options to your taste # for lenny #debdelta_opt=" -n 3 --disable-feature lzma --disable-feature xdelta3-fifo --gpg-home ~/debdelta/gnupg --sign $GNUPGSEC" # for squeeze debdelta_opt=" -n 3 --disable-feature xz --delta-algo xdelta3 --gpg-home ${GNUPGHOME} --sign $GNUPGSEC" debdelta/debdelta-upgrade0000777000000000000000000000000012436652141014314 
2debdeltaustar debdelta/FAQ0000644000000000000000000000404612436652141010052 0ustar Q: How do I use 'debdelta' to upgrade my Debian box, and (possibly) speedup/save on bandwidth? A: Quick answer: apt-get update debdelta-upgrade apt-get upgrade Q: Do you use 'xdelta' or 'bsdiff' ? A: 'debdelta' has an option '--delta-algo' to choose between xdelta xdelta-bzip xdelta3 bsdiff Q: How do you integrate this approach with the minimal security that signed Release files give us today? A: 'debpatch' recreates the new .deb , and guarantees that it is equal to the one in archives, so archive signatures can be verified (for this reason, 'debdelta' does not use dpkg-repack ). Moreover, since version 0.31, delta files are GPG signed. Q: What if any or both the debs are signed using dpkg-sig ? A: Supported. Q: What about .debs where the data part is compressed with bzip ? A: supported since version 0.24 Q: And with lzma? A: Since version 0.31 Q: can 'debpatch' recreate the new .deb using the installed old .deb, even when there are dpkg-diversions ? A: yes. Q: can 'debpatch' recreate the new .deb using the installed old .deb, even when conf files were modified ? A: yes. Q: can 'debpatch' recreate the new .deb using the installed old .deb, when 'prelink' is used in the host? A: since version 0.27 Q: How does 'debdelta' work? What is the content of delta files? A: See the example script /usr/share/debdelta/debpatch.sh Q: What about backward compatibility? If I create a delta with the most recent version of 'debdelta' will another user be able to apply it with an older version of 'debpatch' ? A: It depends. A delta contains some keywords in the form 'need-xxxx' that express the requirements for applying it, so that 'debpatch' will graciously fail if it sees that it cannot satisfy the requirements, and ask to be upgraded. Q: What about backward compatibility, the other way round? 
If I created a delta long ago with a very old version of 'debdelta', will I be be able to apply it with a future version of 'debpatch' ? A: This should always be possible (but for bugs). debdelta/keyrings/0000755000000000000000000000000012436652141011347 5ustar debdelta/keyrings/pub.gpg0000644000000000000000000000707612436652141012646 0ustar *J<>ͥK=)sYɭ(H20QxLgP㎵&^\z![ ^[*͞$Q \Yeaݜ #.9o_F5cSvѴNnբ)k-Bm\hC Lb!}QjIz*{t@nF^u8 mN:} p#;Y|yWގ7;L``"…gdU~) Tu5)]{jF*>lRA!(i,3+l]l6} g5ij' [4Qcft[*4BtGӢt9s)cnd0\]h(R5byV6~J*IJ |ZJTBF~)UIInܭ k:4&ShOhCJFvz42Uv{~_B]?Uj0@X ҆ +xpBa\m6{3-@v;A0GN&CXz nO !RY^aTq-,"#IXA%l*ٴEDebian deltas automatic signing key (2009) v &J   C)YU}B;݆e:ZO؀ye&wNYJď]B6 4L/o\oFJ 3@Z˶9sA0A; dryP:\OOoCLdK P7Ҿa͞Pk*uk6 eN.j ( %UBG;nQFwP={sl@\XdE]Z߻NvuXb4hkնIeHD#v1gEzi0aЏ羳ȣi­ڠM^v8ЙyZH};?1g^dE-hʮdՑg3u~ir@&Ai}0[8ތWiAAտFDebian deltas automatic signing key (2012) >(P7Ҿ    3ʬws]$9O޼vr=1%0?3_NeäJCq}Dbe$LgXQnNe=iMSW[|h}Ֆ(ҍpUTsU, b$rW?^:S^}|>)=]CD4"9/$oc*aԛkcr4/O..mh[*X\f/eGwm$R+Y|;GH S'Ʃ *RloxI M!ER1Rҏo n,z\/QoYiAlL{nA'U"":}\" z/:h%༸cD%iZ뼩?#Tǻ;r {injz/) TH*\ h,{?5y+5xcW](q}.L.5y }new!  “f4 XhTD(`lKDebian deltas automatic signing key (2014) ='S'      ڲ,!"LwV+k 4y_>G.Fծ>ʜq@a\ʔJLDɶ{[]kN7;c6 kh/T߇v3&~K%II+䎁Ut KQZڬZؤ=OPTKF[shHo7y3&\-o/:?O~]*+!UzrKxryjefäs1`  S':yԮyBt'R$$؏ŗhRIvN7$9zr܅fev>[]<ic#ͨAŬK>jFikoz1PT x/{P2HJ[:нܒQj~y@ϋ)'ӧ8fKG?~re#eZ hy8rz7 ǰB :zt?bu.洫Nq: YU]ko%S'    ڲ,!Q4`3}_`<:zῚJH M)P̽,~giykX74xi=(0=K#кk>y'S~%)6valp;MTVw-C1jSRU"h̑Ћk(Fr4aO? !Q/+70f%Q>i0FD GW>΀j@EULF53\L5n_8Aҭ9ՒF6debdelta/COPYING0000644000000000000000000006130412436652141010553 0ustar GNU LIBRARY GENERAL PUBLIC LICENSE Version 2, June 1991 Copyright (C) 1991 Free Software Foundation, Inc. 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed. [This is the first released version of the library GPL. It is numbered 2 because it goes with version 2 of the ordinary GPL.] 
Preamble The licenses for most software are designed to take away your freedom to share and change it. By contrast, the GNU General Public Licenses are intended to guarantee your freedom to share and change free software--to make sure the software is free for all its users. This license, the Library General Public License, applies to some specially designated Free Software Foundation software, and to any other libraries whose authors decide to use it. You can use it for your libraries, too. When we speak of free software, we are referring to freedom, not price. Our General Public Licenses are designed to make sure that you have the freedom to distribute copies of free software (and charge for this service if you wish), that you receive source code or can get it if you want it, that you can change the software or use pieces of it in new free programs; and that you know you can do these things. To protect your rights, we need to make restrictions that forbid anyone to deny you these rights or to ask you to surrender the rights. These restrictions translate to certain responsibilities for you if you distribute copies of the library, or if you modify it. For example, if you distribute copies of the library, whether gratis or for a fee, you must give the recipients all the rights that we gave you. You must make sure that they, too, receive or can get the source code. If you link a program with the library, you must provide complete object files to the recipients so that they can relink them with the library, after making changes to the library and recompiling it. And you must show them these terms so they know their rights. Our method of protecting your rights has two steps: (1) copyright the library, and (2) offer you this license which gives you legal permission to copy, distribute and/or modify the library. Also, for each distributor's protection, we want to make certain that everyone understands that there is no warranty for this free library. 
If the library is modified by someone else and passed on, we want its recipients to know that what they have is not the original version, so that any problems introduced by others will not reflect on the original authors' reputations. Finally, any free program is threatened constantly by software patents. We wish to avoid the danger that companies distributing free software will individually obtain patent licenses, thus in effect transforming the program into proprietary software. To prevent this, we have made it clear that any patent must be licensed for everyone's free use or not licensed at all. Most GNU software, including some libraries, is covered by the ordinary GNU General Public License, which was designed for utility programs. This license, the GNU Library General Public License, applies to certain designated libraries. This license is quite different from the ordinary one; be sure to read it in full, and don't assume that anything in it is the same as in the ordinary license. The reason we have a separate public license for some libraries is that they blur the distinction we usually make between modifying or adding to a program and simply using it. Linking a program with a library, without changing the library, is in some sense simply using the library, and is analogous to running a utility program or application program. However, in a textual and legal sense, the linked executable is a combined work, a derivative of the original library, and the ordinary General Public License treats it as such. Because of this blurred distinction, using the ordinary General Public License for libraries did not effectively promote software sharing, because most developers did not use the libraries. We concluded that weaker conditions might promote sharing better. However, unrestricted linking of non-free programs would deprive the users of those programs of all benefit from the free status of the libraries themselves. 
This Library General Public License is intended to permit developers of non-free programs to use free libraries, while preserving your freedom as a user of such programs to change the free libraries that are incorporated in them. (We have not seen how to achieve this as regards changes in header files, but we have achieved it as regards changes in the actual functions of the Library.) The hope is that this will lead to faster development of free libraries. The precise terms and conditions for copying, distribution and modification follow. Pay close attention to the difference between a "work based on the library" and a "work that uses the library". The former contains code derived from the library, while the latter only works together with the library. Note that it is possible for a library to be covered by the ordinary General Public License rather than by this special one. GNU LIBRARY GENERAL PUBLIC LICENSE TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION 0. This License Agreement applies to any software library which contains a notice placed by the copyright holder or other authorized party saying it may be distributed under the terms of this Library General Public License (also called "this License"). Each licensee is addressed as "you". A "library" means a collection of software functions and/or data prepared so as to be conveniently linked with application programs (which use some of those functions and data) to form executables. The "Library", below, refers to any such software library or work which has been distributed under these terms. A "work based on the Library" means either the Library or any derivative work under copyright law: that is to say, a work containing the Library or a portion of it, either verbatim or with modifications and/or translated straightforwardly into another language. (Hereinafter, translation is included without limitation in the term "modification".) 
"Source code" for a work means the preferred form of the work for making modifications to it. For a library, complete source code means all the source code for all modules it contains, plus any associated interface definition files, plus the scripts used to control compilation and installation of the library. Activities other than copying, distribution and modification are not covered by this License; they are outside its scope. The act of running a program using the Library is not restricted, and output from such a program is covered only if its contents constitute a work based on the Library (independent of the use of the Library in a tool for writing it). Whether that is true depends on what the Library does and what the program that uses the Library does. 1. You may copy and distribute verbatim copies of the Library's complete source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice and disclaimer of warranty; keep intact all the notices that refer to this License and to the absence of any warranty; and distribute a copy of this License along with the Library. You may charge a fee for the physical act of transferring a copy, and you may at your option offer warranty protection in exchange for a fee. 2. You may modify your copy or copies of the Library or any portion of it, thus forming a work based on the Library, and copy and distribute such modifications or work under the terms of Section 1 above, provided that you also meet all of these conditions: a) The modified work must itself be a software library. b) You must cause the files modified to carry prominent notices stating that you changed the files and the date of any change. c) You must cause the whole of the work to be licensed at no charge to all third parties under the terms of this License. 
d) If a facility in the modified Library refers to a function or a table of data to be supplied by an application program that uses the facility, other than as an argument passed when the facility is invoked, then you must make a good faith effort to ensure that, in the event an application does not supply such function or table, the facility still operates, and performs whatever part of its purpose remains meaningful. (For example, a function in a library to compute square roots has a purpose that is entirely well-defined independent of the application. Therefore, Subsection 2d requires that any application-supplied function or table used by this function must be optional: if the application does not supply it, the square root function must still compute square roots.) These requirements apply to the modified work as a whole. If identifiable sections of that work are not derived from the Library, and can be reasonably considered independent and separate works in themselves, then this License, and its terms, do not apply to those sections when you distribute them as separate works. But when you distribute the same sections as part of a whole which is a work based on the Library, the distribution of the whole must be on the terms of this License, whose permissions for other licensees extend to the entire whole, and thus to each and every part regardless of who wrote it. Thus, it is not the intent of this section to claim rights or contest your rights to work written entirely by you; rather, the intent is to exercise the right to control the distribution of derivative or collective works based on the Library. In addition, mere aggregation of another work not based on the Library with the Library (or with a work based on the Library) on a volume of a storage or distribution medium does not bring the other work under the scope of this License. 3. 
You may opt to apply the terms of the ordinary GNU General Public License instead of this License to a given copy of the Library. To do this, you must alter all the notices that refer to this License, so that they refer to the ordinary GNU General Public License, version 2, instead of to this License. (If a newer version than version 2 of the ordinary GNU General Public License has appeared, then you can specify that version instead if you wish.) Do not make any other change in these notices. Once this change is made in a given copy, it is irreversible for that copy, so the ordinary GNU General Public License applies to all subsequent copies and derivative works made from that copy. This option is useful when you wish to copy part of the code of the Library into a program that is not a library. 4. You may copy and distribute the Library (or a portion or derivative of it, under Section 2) in object code or executable form under the terms of Sections 1 and 2 above provided that you accompany it with the complete corresponding machine-readable source code, which must be distributed under the terms of Sections 1 and 2 above on a medium customarily used for software interchange. If distribution of object code is made by offering access to copy from a designated place, then offering equivalent access to copy the source code from the same place satisfies the requirement to distribute the source code, even though third parties are not compelled to copy the source along with the object code. 5. A program that contains no derivative of any portion of the Library, but is designed to work with the Library by being compiled or linked with it, is called a "work that uses the Library". Such a work, in isolation, is not a derivative work of the Library, and therefore falls outside the scope of this License. 
However, linking a "work that uses the Library" with the Library creates an executable that is a derivative of the Library (because it contains portions of the Library), rather than a "work that uses the library". The executable is therefore covered by this License. Section 6 states terms for distribution of such executables. When a "work that uses the Library" uses material from a header file that is part of the Library, the object code for the work may be a derivative work of the Library even though the source code is not. Whether this is true is especially significant if the work can be linked without the Library, or if the work is itself a library. The threshold for this to be true is not precisely defined by law. If such an object file uses only numerical parameters, data structure layouts and accessors, and small macros and small inline functions (ten lines or less in length), then the use of the object file is unrestricted, regardless of whether it is legally a derivative work. (Executables containing this object code plus portions of the Library will still fall under Section 6.) Otherwise, if the work is a derivative of the Library, you may distribute the object code for the work under the terms of Section 6. Any executables containing that work also fall under Section 6, whether or not they are linked directly with the Library itself. 6. As an exception to the Sections above, you may also compile or link a "work that uses the Library" with the Library to produce a work containing portions of the Library, and distribute that work under terms of your choice, provided that the terms permit modification of the work for the customer's own use and reverse engineering for debugging such modifications. You must give prominent notice with each copy of the work that the Library is used in it and that the Library and its use are covered by this License. You must supply a copy of this License. 
If the work during execution displays copyright notices, you must include the copyright notice for the Library among them, as well as a reference directing the user to the copy of this License. Also, you must do one of these things: a) Accompany the work with the complete corresponding machine-readable source code for the Library including whatever changes were used in the work (which must be distributed under Sections 1 and 2 above); and, if the work is an executable linked with the Library, with the complete machine-readable "work that uses the Library", as object code and/or source code, so that the user can modify the Library and then relink to produce a modified executable containing the modified Library. (It is understood that the user who changes the contents of definitions files in the Library will not necessarily be able to recompile the application to use the modified definitions.) b) Accompany the work with a written offer, valid for at least three years, to give the same user the materials specified in Subsection 6a, above, for a charge no more than the cost of performing this distribution. c) If distribution of the work is made by offering access to copy from a designated place, offer equivalent access to copy the above specified materials from the same place. d) Verify that the user has already received a copy of these materials or that you have already sent this user a copy. For an executable, the required form of the "work that uses the Library" must include any data and utility programs needed for reproducing the executable from it. However, as a special exception, the source code distributed need not include anything that is normally distributed (in either source or binary form) with the major components (compiler, kernel, and so on) of the operating system on which the executable runs, unless that component itself accompanies the executable. 
It may happen that this requirement contradicts the license restrictions of other proprietary libraries that do not normally accompany the operating system. Such a contradiction means you cannot use both them and the Library together in an executable that you distribute. 7. You may place library facilities that are a work based on the Library side-by-side in a single library together with other library facilities not covered by this License, and distribute such a combined library, provided that the separate distribution of the work based on the Library and of the other library facilities is otherwise permitted, and provided that you do these two things: a) Accompany the combined library with a copy of the same work based on the Library, uncombined with any other library facilities. This must be distributed under the terms of the Sections above. b) Give prominent notice with the combined library of the fact that part of it is a work based on the Library, and explaining where to find the accompanying uncombined form of the same work. 8. You may not copy, modify, sublicense, link with, or distribute the Library except as expressly provided under this License. Any attempt otherwise to copy, modify, sublicense, link with, or distribute the Library is void, and will automatically terminate your rights under this License. However, parties who have received copies, or rights, from you under this License will not have their licenses terminated so long as such parties remain in full compliance. 9. You are not required to accept this License, since you have not signed it. However, nothing else grants you permission to modify or distribute the Library or its derivative works. These actions are prohibited by law if you do not accept this License. 
Therefore, by modifying or distributing the Library (or any work based on the Library), you indicate your acceptance of this License to do so, and all its terms and conditions for copying, distributing or modifying the Library or works based on it. 10. Each time you redistribute the Library (or any work based on the Library), the recipient automatically receives a license from the original licensor to copy, distribute, link with or modify the Library subject to these terms and conditions. You may not impose any further restrictions on the recipients' exercise of the rights granted herein. You are not responsible for enforcing compliance by third parties to this License. 11. If, as a consequence of a court judgment or allegation of patent infringement or for any other reason (not limited to patent issues), conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this License, they do not excuse you from the conditions of this License. If you cannot distribute so as to satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence you may not distribute the Library at all. For example, if a patent license would not permit royalty-free redistribution of the Library by all those who receive copies directly or indirectly through you, then the only way you could satisfy both it and this License would be to refrain entirely from distribution of the Library. If any portion of this section is held invalid or unenforceable under any particular circumstance, the balance of the section is intended to apply, and the section as a whole is intended to apply in other circumstances. 
It is not the purpose of this section to induce you to infringe any patents or other property right claims or to contest validity of any such claims; this section has the sole purpose of protecting the integrity of the free software distribution system which is implemented by public license practices. Many people have made generous contributions to the wide range of software distributed through that system in reliance on consistent application of that system; it is up to the author/donor to decide if he or she is willing to distribute software through any other system and a licensee cannot impose that choice. This section is intended to make thoroughly clear what is believed to be a consequence of the rest of this License. 12. If the distribution and/or use of the Library is restricted in certain countries either by patents or by copyrighted interfaces, the original copyright holder who places the Library under this License may add an explicit geographical distribution limitation excluding those countries, so that distribution is permitted only in or among countries not thus excluded. In such case, this License incorporates the limitation as if written in the body of this License. 13. The Free Software Foundation may publish revised and/or new versions of the Library General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns. Each version is given a distinguishing version number. If the Library specifies a version number of this License which applies to it and "any later version", you have the option of following the terms and conditions either of that version or of any later version published by the Free Software Foundation. If the Library does not specify a license version number, you may choose any version ever published by the Free Software Foundation. 14. 
If you wish to incorporate parts of the Library into other free programs whose distribution conditions are incompatible with these, write to the author to ask for permission. For software which is copyrighted by the Free Software Foundation, write to the Free Software Foundation; we sometimes make exceptions for this. Our decision will be guided by the two goals of preserving the free status of all derivatives of our free software and of promoting the sharing and reuse of software generally. NO WARRANTY 15. BECAUSE THE LIBRARY IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY FOR THE LIBRARY, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE LIBRARY "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE LIBRARY IS WITH YOU. SHOULD THE LIBRARY PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. 16. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR REDISTRIBUTE THE LIBRARY AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE LIBRARY (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE LIBRARY TO OPERATE WITH ANY OTHER SOFTWARE), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. END OF TERMS AND CONDITIONS How to Apply These Terms to Your New Libraries If you develop a new library, and you want it to be of the greatest possible use to the public, we recommend making it free software that everyone can redistribute and change. 
You can do so by permitting redistribution under these terms (or, alternatively, under the terms of the ordinary General Public License). To apply these terms, attach the following notices to the library. It is safest to attach them to the start of each source file to most effectively convey the exclusion of warranty; and each file should have at least the "copyright" line and a pointer to where the full notice is found. Copyright (C) This library is free software; you can redistribute it and/or modify it under the terms of the GNU Library General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. This library is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Library General Public License for more details. You should have received a copy of the GNU Library General Public License along with this library; if not, write to the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston MA 02110-1301 USA Also add information on how to contact you by electronic and paper mail. You should also get your employer (if you work as a programmer) or your school, if any, to sign a "copyright disclaimer" for the library, if necessary. Here is a sample; alter the names: Yoyodyne, Inc., hereby disclaims all copyright interest in the library `Frob' (a library for tweaking knobs) written by James Random Hacker. , 1 April 1990 Ty Coon, President of Vice That's all there is to it! debdelta/debpatch-url0000777000000000000000000000000012436652141013475 2debdeltaustar debdelta/debdeltas.10000644000000000000000000001424112436652141011527 0ustar .TH DEBDELTAS "1" "aug 2009" "debdeltas" "User Commands" .SH NAME debdeltas \- compute deltas between Debian packages .SH SYNOPSIS .B debdeltas [\fIOPTION\fR]... \fIARGS\fR ... 
.SH DESCRIPTION
.B debdeltas
computes deltas between the old and new versions of Debian packages.
The explanation of what a delta is can be found in
.BR debdelta (1).
.SH COMMAND ARGUMENTS
In all of the following, \fIARGS\fR can be a Debian binary file
(usually, a file ending in \fI.deb\fR), or a directory containing such
files, or a \fIPackages\fR file (that is, an index file such as those
found in Debian mirrors - even the zipped ones - see
.BR apt-ftparchive (1) ).
We will call \fIcmdline\fR all such args that are not related to an option.
As a first step, \fIdebdeltas\fR builds an internal list of Debian packages.
To this end, it parses all \fIARGS\fR of the options \fB\-\-alt\fR ,
\fB\-\-old \fR, and those \fIARGS\fR given as non-option arguments
(the aforementioned cmdline args).
If the argument is a Debian package, \fIdebdeltas\fR adds it to the list;
if the argument is a directory, \fIdebdeltas\fR scans it for Debian
packages to be added to the list; if the argument is a \fIPackages\fR
file, \fIdebdeltas\fR parses it and adds all Debian packages to the list.
Then debdeltas groups all found Debian packages by name and architecture.
For each group, the newest cmdline version is isolated, and then deltas
are computed from all --old versions to that version.
These deltas are stored in many delta files with appropriate names of the
form \fIname_oldversion_newversion_architecture.debdelta\fR ;
the location of the delta files is specified by the "--dir" option.
Note that the location of files in a \fIPackages\fR index is specified
relative to the base of the mirror, e.g.
.I Filename: pool/main/x/xxx/xxx_3_i386.deb
For this reason, any \fIPackages\fR argument must be presented with a
long path (that contains at least the \fIdists\fR directory).
Note also that the same directory or index can be provided many times,
as --old, as --alt, and as a cmdline argument.
Note also that debdeltas will skip all packages that are smaller than 10KB.
.SH MAIN OPTIONS
.TP
\fB\-\-dir \fIDIR
force saving of deltas in this DIR (otherwise they go in the dir of the
newer deb_file).
.TP
\fB\-\-alt \fIARGS\fR
this is parsed as the non-option command line, but the found packages are
never considered for a delta.
They are considered though when --clean-deltas is used, to decide whether
to delete a delta; and are useful to find debs that are deleted from
indexes and moved elsewhere.
Multiple --alt can be specified.
.TP
\fB\-\-old \fIARGS\fR
this is parsed as the non-option command line, and the found packages are
considered as the older versions.
Multiple --old can be specified.
Moreover, when scanning a \fIPackages\fR index, it is not an error if
files do not exist, as long as they have been moved into a --alt directory.
Note that, if no --old is specified, then no deltas will be generated.
.TP
\fB\-\-forensicdir \fIDIR
write hash files; these are to be compared with those produced by
.I debdelta-upgrade --forensic=...
when a delta fails
.SH The double slash
If a directory path is provided as argument to --dir, and it ends in // ,
then this triggers a specific behaviour related to files found in
\fIPackages\fR indexes (as presented in the commandline).
If a package in an index is stored in \fIpool/main/x/xxx/xxx_3_i386.deb\fR ,
and --dir is \fI/tmp/foobar//\fR , then the delta will be saved in
\fI/tmp/foobar/pool/main/x/xxx/xxx_2_3_i386.debdelta\fR.
The same is true for --alt (and is useful to find old versions of a package).
.SH OTHER OPTIONS
.TP
.B --signing-key \fI KEY
key used to sign the deltas (using GnuPG)
.TP
\fB\-n \fIN
how many deltas to produce at maximum for each unique package/arch
(default unlimited)
.TP
.B --clean-deltas
delete deltas that upgrade to package versions that are not found in the
cmdline ARGS, and are at least two days old (according to mtime).
.TP
\fB\-\-no\-md5
do not include MD5 info in debdelta.
.TP
\fB\-\-needsold\fR
create a patch that can only be used if the old .deb is available.
.TP \fB\-\-delta-algo\fR \fIALGO use a specific backend for computing binary diffs; possible values are: xdelta xdelta-bzip xdelta3 bsdiff .TP \fB\-M \fIMb maximum memory to use (for 'bsdiff' or 'xdelta'). .TP \fB\--test check that the patch does apply. .TP \fB\-v verbose (can be added multiple times). .TP \fB\-d print traceback on errors; save useful info in temporary files in case that a backend crashes. (If '-d' is added multiple times, it also adds to the patches other extra debugging checks: only for advanced bug tracking). .TP \fB\-k keep temporary files (use for debugging). .TP .B --gpg-home specify a different home for GnuPG, default for root is .I /etc/debdelta/gnupg while for other users is unset. See .I --homedir in .BR gpg(1) for details. .TP .BI \--disable-feature \ FEATURE disable an internal feature. See the documentation in README.features. .TP .BI --no-act Just list the deltas that would be created. .SH SECURITY See .BR debdelta (1) .SH EXIT STATUS See .BR debdelta (1) .SH EXAMPLES .I debdeltas --dir /tmp/ --old ~/mydebs ~/mydebs will generate all deltas to upgrade from the old versions in ~/mydebs to the newest version in ~/mydebs , and store the deltas in /tmp/ .I debdeltas --dir /tmp// --old /mirror/debian/dists/lenny/main/binary-i386/Packages.gz /mirror/debian/dists/squeeze/main/binary-i386/Packages.gz will generate all deltas to upgrade main/i386 from lenny to squeeze, and store the deltas in a pool structure under /tmp/ See also the example scripts .I /usr/share/debdelta/debmirror-delta-security and .I /usr/share/debdelta/debmirror-deltas .SH "REPORTING BUGS" Report bugs to . .SH AUTHORS Debdelta was written and is copyright \(co 2006-09 Andrea Mennucci. .br This man page was written by Jegou Pierre-yves . .SH COPYING This is free software. You may redistribute copies of it under the terms of the GNU Library General Public License . There is NO WARRANTY, to the extent permitted by law. 
.SH "SEE ALSO" .BR debpatch (1), .BR debdelta (1), .BR /usr/share/doc/debdelta/README . debdelta/README.features0000644000000000000000000000672512436652141012223 0ustar When 'debdelta' is improved, features are added. In some cases, a delta created with a newer version of debdelta may not work with an older version of 'debpatch'. The feature that 'debdelta' or 'debdeltas' use when creating a delta depend on command line option, but also on the content of the deb files that are debdelta-ed. Whenever a feature is used in creating a delta, a special keywords of the form "needs-FEATURE" is added to the info (that is contained in the delta file, and can be browsed using 'debpatch --info'). An older version of debpatch will graciously exit if it finds in the info a feature that it does not recognize. To obtain backward compatibility, some features can be disabled by setting the commandline option --disable-feature Here is the listing of features, divided in sections --- delta format needs-old the delta was created with the option --needsold; such delta can only be applied if the old .deb is available, (but not using the installed version of the old deb). This is rarely used. --- binary delta backends needs-bsdiff this delta needs the program 'bsdiff' , that is available in the samename package; currently bsdiff is the default backend (see the option --delta-algo ). needs-xdelta this delta needs the program 'xdelta' that is available in the samename package; it is used if --delta-algo is set to xdelta needs-xdelta3 this delta needs the program 'xdelta3' that is available in the samename package; it is used if --delta-algo is set to xdelta3 The support for all 3 above was added before Debian/lenny. needs-xdelta3-fifo this delta needs xdelta3 at least version 0y , that supports reading the source thru a name FIFO. The support for this was added in 'debdelta' 0.39, and will be available in Debian/stable with the release of Debian/squeeze. 
If you want to create deltas that are compatible with Debian/lenny, use '--disable-feature xdelta3-fifo'. ---- compressor debdelta uses internally bzip2 and gzip in many places, and it depends on those, so there is no 'needs-bzip2' and 'needs-gzip'. For other compressors, things are as follows. needs-minibzip2 this delta needs the program 'minibzip2' that is available in the package 'debdelta' itself; it is used to recreate data.tar.bz2, (so that it is identical to the one created by dpkg-deb). The support for this was added before Debian/lenny. needs-lzma this delta needs the program 'lzma' that is available in the samename package. The support for this was added in 'debdelta' 0.31, and will be available in Debian/stable with the release of Debian/squeeze. 'lzma' is needed if the new or old deb contains data.tar.lzma (but this is not yet allowed in repositories, as of Aug 09). Moreover, 'lzma' may be used to compress the internal file 'patch.sh' in the delta file, if this provides a shorter file than 'bzip2' and 'gzip'. This latter usage can be disabled by setting '--disable-feature lzma' (and such option provides compatibility with debdelta in Debian/lenny). needs-xz this delta needs the program 'xz' that is available in package 'xz-utils'. 'xz' is needed if the new or old deb contains data.tar.xz , and this is allowed in the Debian archives since Jul 2011. Moreover, 'xz' may be used to compress the internal file 'patch.sh' in the delta file, if this provides a shorter file than 'bzip2' and 'gzip' or 'lzma'. This latter usage can be disabled by setting '--disable-feature xz' (and such option provides compatibility with debdelta in Debian/squeeze). debdelta/debdelta-upgrade.10000644000000000000000000001153412436652141012773 0ustar .TH DEBDELTA-UPGRADE "1" "aug 2009" "debdelta-upgrade" "User Commands" .SH NAME debdelta-upgrade \- Downloads all deltas that may be used to 'apt-get upgrade', and apply them. .SH SYNOPSIS .B debdelta-upgrade [\fIOPTION\fR]... 
[\fIPACKAGE\fR] ... .SH DESCRIPTION This program is designed to download changes (deltas) that may be used to .B apt-get upgrade all listed packages, and apply them. If no package is listed in the command line, then it will apply to all upgradable packages. See .BR debdelta (1) for more details on the delta files. .SH OPTIONS .TP \fB\-\-dir \fIDIR directory where to save results. (default: /var/cache/apt/archives for root, /tmp/archive for non-root users) .TP \fB\-\-format \fIFORMAT format of recreated debs. FORMAT=deb is the usual, FORMAT=unzipped means that the data.tar part is not compressed, (and this may save some time) .TP \fB\-\-deb\-policy \fIPOLICY policy to decide which debs to download, it is a comma separated list of (abbreviations of) source = there is no /etc/debdelta/sources.conf line big = the delta is too big error = the delta fails to apply unavailable = all other cases after = continue downloading debs after patching has ended default is s,b,e .TP \fB\-v verbose (can be added multiple times). .TP \fB\-d print full traceback on Python errors; save useful info in temporary files in case that a delta fails. .TP \fB\-k keep temporary files (use for debugging). .TP \fB \-A \--accept-unsigned accept unsigned deltas. .TP \fB \-\-gpg-home specify a different home for GnuPG, default for root is .I /etc/debdelta/gnupg while for other users is unset. See .I --homedir in .BR gpg(1) for details. .TP \fB \-\-forensic \fIMETHOD if a delta fails, report logs so that the problem may be addressed. Method may be do just prepare logs and say where they are mutt send logs by email using mutt icedove send logs by email using icedove (as root!) http send by http (the easiest and most recommended method!) .SH EXAMPLES When invoked as root, .I apt-get update && debdelta-upgrade && apt-get upgrade will download delta files, recreate the needed Debian packages, and then upgrade your Debian host. 
When only slow Internet connection is available, the above procedure may be significantly faster than "apt-get update && apt-get upgrade". (Use -v to see how much faster.) The package manager .I cupt uses deltas as well when upgrading. .SH SECURITY A Debian package that is recreated using .I debdelta-upgrade is byte-by-byte identical to the original one; when "apt-get upgrade" is subsequently invoked, the cryptographic archive authentication support (see .I apt-secure (8) ) will guarantee that it can be trusted to be installed. Moreover, the delta files themselves are cryptographically protected using GnuPG, MD5 and SHA1 (using the same method as .BR dpkg-sig (1) ), and .I debdelta-upgrade will refuse to apply corrupted or unsigned deltas (but, see option .B -A ). .SH DOWNLOAD AND PROXIES .I debdelta-upgrade downloads deltas in two passes: in the first pass, it downloads the first kB of each delta, to extract the delta parameters and to examine them. If the delta does not exist, it checks if the .I debdelta-too-big timestamp file exists, and then it examines the .I \-\-deb\-policy to decide if to download the deb instead. In the second pass, it downloads (the rest of) the deltas and queues them in a patching queue. For plain direct http downloads, .I debdelta-upgrade uses the Python library 'httplib' directly; in particular, it uses keep-alive features, and this speeds up a lot the first pass. 
If variables such as .I http_proxy are set in the environment, then instead .I debdelta-upgrade will use the 'urllib2' module: this though slows down sensibly the first pass above (since 'urllib2' opens a different connection for each download); moreover 'urllib2' seems to be buggy in some places (for example, it transforms http response "401" into "404" , and "302" into "200") so the output explanation printed by .I debdelta-upgrade in case of download error may be wrong; and the .I \-\-deb\-policy .I "big" is currently disabled (until the author understands 'urllib2' better). .SH EXIT STATUS See .BR debdelta (1) .SH "REPORTING BUGS" Report bugs to . .SH "SIZES" In the program printout kB = 1024 bytes and MB = 1024^2 bytes . .SH AUTHORS Debdelta was written and is copyright \(co 2006-09 Andrea Mennucci. .br This man page was written by Jegou Pierre-yves . .SH COPYING This is free software. You may redistribute copies of it under the terms of the GNU Library General Public License . There is NO WARRANTY, to the extent permitted by law. .SH "SEE ALSO" .BR debdelta (1), .BR apt-get (8), .BR /usr/share/doc/debdelta/README.upgrade , .BR /usr/share/doc/debdelta/README . debdelta/debpatch.sh0000644000000000000000000000300012436652141011613 0ustar #!/bin/bash -e # Copyright (C) 2008 Andrea Mennucci. 
# License: GNU Library General Public License, version 2 or later
# this small script is capable of applying a deb delta,
# it basically summarizes what "debpatch" does
#
# Usage: debpatch.sh DELTA OLDDEB NEWDEB
# Recreates NEWDEB by applying the DELTA file to OLDDEB.

# All three positional arguments are mandatory.
if test "$3" = "" ; then
  echo Provide 3 arguments: patchin fromfile tofile
  exit
fi
# The full 'debpatch' accepts "/" to mean "use the installed version of
# the old package"; this simple script needs the actual old .deb file.
if test "$2" = / ; then
  echo Sorry, for this simple script you really need the old deb
  exit 1
fi
# Make the delta path absolute, since we 'cd' around below.
case "$1" in
  /*) delta="$1" ;;
  *) delta=`pwd`/"$1" ;;
esac
olddeb="$2"
newdeb="$3"
# Scratch workspace; the OLD/ NEW/ PATCH/ layout is what the delta's
# embedded patch.sh expects to find.
TD=${TMPDIR:-/tmp}/debpatch$$
mkdir $TD $TD/OLD $TD/OLD/CONTROL $TD/OLD/DATA $TD/NEW $TD/NEW/CONTROL $TD/NEW/DATA $TD/PATCH
# patch.sh invokes ./minigzip and ./minibzip2 from the workspace.
ln -s '/usr/lib/debdelta/minigzip' '/usr/lib/debdelta/minibzip2' $TD/
# Unpack the old deb (an 'ar' archive): first the control area ...
ar p $olddeb control.tar.gz | tar -x -z -p -f - -C $TD/OLD/CONTROL
# ... then the data area, whose compression may be lzma, bzip2 or gzip.
if ar t $olddeb | grep -q data.tar.lzma ; then
  ar p $olddeb data.tar.lzma | unlzma -c | tar -x -p -f - -C $TD/OLD/DATA
elif ar t $olddeb | grep -q data.tar.bz2 ; then
  ar p $olddeb data.tar.bz2 | tar -x --bzip2 -p -f - -C $TD/OLD/DATA
else
  ar p $olddeb data.tar.gz | tar -x -z -p -f - -C $TD/OLD/DATA
fi
pushd $TD > /dev/null
# The delta file itself is also an 'ar' archive; extract it into PATCH/.
cd PATCH
ar x "$delta"
cd $TD
# The embedded patch.sh may be shipped compressed with any of these.
if test -r PATCH/patch.sh.lzma ; then
  unlzma PATCH/patch.sh.lzma
elif test -r PATCH/patch.sh.gz ; then
  gunzip PATCH/patch.sh.gz
elif test -r PATCH/patch.sh.bz2 ; then
  bunzip2 PATCH/patch.sh.bz2
fi
#dash will not work, see bug 379227
bash -e PATCH/patch.sh
#note that we do not check MD5 in this simple script..
popd > /dev/null
# patch.sh leaves the rebuilt package at $TD/NEW.file
mv -vb $TD/NEW.file "$newdeb"
#eventually,
#rm -r "$TD"